[ 513.187229] env[68638]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68638) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 513.187644] env[68638]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68638) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 513.187685] env[68638]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68638) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 513.188025] env[68638]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 513.285910] env[68638]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68638) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 513.295815] env[68638]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68638) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 513.339584] env[68638]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 513.897836] env[68638]: INFO nova.virt.driver [None req-0b43fdbe-1c12-42fb-abfe-d0da6f834fc3 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 513.968023] env[68638]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 513.968199] env[68638]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 513.968297] env[68638]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68638) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 516.844188] env[68638]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-d02a6c2f-54d2-4156-baef-38fd6db5720a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.860406] env[68638]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68638) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 516.860538] env[68638]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2d8bfa18-6d53-4a1e-8d93-b71fd640877d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.886050] env[68638]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 04726.
[ 516.886222] env[68638]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.918s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 516.886697] env[68638]: INFO nova.virt.vmwareapi.driver [None req-0b43fdbe-1c12-42fb-abfe-d0da6f834fc3 None None] VMware vCenter version: 7.0.3
[ 516.890440] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c995b54-c621-405b-91fb-2700aebd4afe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.907601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd1df60-29c9-4866-903e-b1aed2d472bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.913089] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c323b56e-1bca-4f21-be5a-810dfd867812 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.919744] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9840cee0-eb6d-43cf-9d0c-ef5a914af4d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.932660] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e12c6fe-155c-4610-ae86-6a1543e1994f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.938340] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e1cf28-b260-4a43-9f6d-bad382aeff51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.968536] env[68638]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-6d8e4e2d-8a1c-44e4-a8c0-92fe67f76372 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 516.973547] env[68638]: DEBUG nova.virt.vmwareapi.driver [None req-0b43fdbe-1c12-42fb-abfe-d0da6f834fc3 None None] Extension org.openstack.compute already exists. {{(pid=68638) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 516.976188] env[68638]: INFO nova.compute.provider_config [None req-0b43fdbe-1c12-42fb-abfe-d0da6f834fc3 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 517.479213] env[68638]: DEBUG nova.context [None req-0b43fdbe-1c12-42fb-abfe-d0da6f834fc3 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b8ccb1dd-f5bf-4963-801c-634688783be5(cell1) {{(pid=68638) load_cells /opt/stack/nova/nova/context.py:464}}
[ 517.481512] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 517.481764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 517.482448] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 517.482904] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Acquiring lock "b8ccb1dd-f5bf-4963-801c-634688783be5" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 517.483103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Lock "b8ccb1dd-f5bf-4963-801c-634688783be5" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 517.484118] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Lock "b8ccb1dd-f5bf-4963-801c-634688783be5" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 517.504930] env[68638]: INFO dbcounter [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Registered counter for database nova_cell0
[ 517.513858] env[68638]: INFO dbcounter [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Registered counter for database nova_cell1
[ 517.947459] env[68638]: DEBUG oslo_db.sqlalchemy.engines [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68638) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 517.947821] env[68638]: DEBUG oslo_db.sqlalchemy.engines [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68638) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 517.952729] env[68638]: ERROR nova.db.main.api [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 517.952729] env[68638]: result = function(*args, **kwargs)
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 517.952729] env[68638]: return func(*args, **kwargs)
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 517.952729] env[68638]: result = fn(*args, **kwargs)
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 517.952729] env[68638]: return f(*args, **kwargs)
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 517.952729] env[68638]: return db.service_get_minimum_version(context, binaries)
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 517.952729] env[68638]: _check_db_access()
[ 517.952729] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 517.952729] env[68638]: stacktrace = ''.join(traceback.format_stack())
[ 517.952729] env[68638]:
[ 517.953859] env[68638]: ERROR nova.db.main.api [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 517.953859] env[68638]: result = function(*args, **kwargs)
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 517.953859] env[68638]: return func(*args, **kwargs)
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 517.953859] env[68638]: result = fn(*args, **kwargs)
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 517.953859] env[68638]: return f(*args, **kwargs)
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 517.953859] env[68638]: return db.service_get_minimum_version(context, binaries)
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 517.953859] env[68638]: _check_db_access()
[ 517.953859] env[68638]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 517.953859] env[68638]: stacktrace = ''.join(traceback.format_stack())
[ 517.953859] env[68638]:
[ 517.954674] env[68638]: WARNING nova.objects.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Failed to get minimum service version for cell b8ccb1dd-f5bf-4963-801c-634688783be5
[ 517.954674] env[68638]: WARNING nova.objects.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 517.954883] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Acquiring lock "singleton_lock" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 517.954998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Acquired lock "singleton_lock" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
517.955261] env[68638]: DEBUG oslo_concurrency.lockutils [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Releasing lock "singleton_lock" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 517.955598] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Full set of CONF: {{(pid=68638) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 517.955741] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ******************************************************************************** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 517.955867] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] Configuration options gathered from: {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 517.956015] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 517.956223] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 517.956351] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ================================================================================ {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 517.956562] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] allow_resize_to_same_host = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.956731] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] arq_binding_timeout = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.956859] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] backdoor_port = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.956984] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] backdoor_socket = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.957162] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] block_device_allocate_retries = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.957324] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] block_device_allocate_retries_interval = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.957492] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cert = self.pem {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.957655] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.957819] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute_monitors = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.958222] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] config_dir = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.958411] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] config_drive_format = iso9660 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.958584] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.958716] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] config_source = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.958884] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] console_host = devstack {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959067] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] control_exchange = nova {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959231] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cpu_allocation_ratio = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959390] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] daemon = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959556] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] debug = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959711] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_access_ip_network_name = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.959873] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_availability_zone = nova {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960036] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_ephemeral_format = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960226] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_green_pool_size = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960467] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960652] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] default_schedule_zone = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960785] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] disk_allocation_ratio = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.960940] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] enable_new_services = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961128] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] enabled_apis = ['osapi_compute'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961312] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] enabled_ssl_apis = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961475] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] flat_injected = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961632] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] force_config_drive = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961791] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] force_raw_images = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.961958] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] graceful_shutdown_timeout = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.962130] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] heal_instance_info_cache_interval = -1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.962350] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] host = cpu-1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.962550] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.962722] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.962883] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.963112] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.963277] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_build_timeout = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.963438] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_delete_interval = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.963602] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_format = [instance: %(uuid)s] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.963812] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_name_template = instance-%08x {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964015] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_usage_audit = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964199] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_usage_audit_period = month {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964367] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964535] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964700] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] internal_service_availability_zone = internal {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.964851] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] key = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965015] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] live_migration_retry_count = 30 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965188] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_color = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965351] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_config_append = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965514] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965670] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_dir = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965824] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.965949] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_options = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966120] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_rotate_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966287] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_rotate_interval_type = days {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966453] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] log_rotation_type = none {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966614] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966696] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.966859] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967031] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967162] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967324] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] long_rpc_timeout = 1800 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967471] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_concurrent_builds = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967627] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_concurrent_live_migrations = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967779] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_concurrent_snapshots = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.967935] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_local_block_devices = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968122] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_logfile_count = 30 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968297] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] max_logfile_size_mb = 200 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968459] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] maximum_instance_delete_attempts = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968625] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metadata_listen = 0.0.0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968787] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metadata_listen_port = 8775 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.968951] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metadata_workers = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969122] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] migrate_max_retries = -1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969287] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] mkisofs_cmd = genisoimage {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969489] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969617] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] my_ip = 10.180.1.21 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969814] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.969971] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] network_allocate_retries = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.970157] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.970351] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.970518] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] osapi_compute_listen_port = 8774 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.970722] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] osapi_compute_unique_server_name_scope = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.970849] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] osapi_compute_workers = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971026] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] password_length = 12 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971198] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] periodic_enable = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971373] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] periodic_fuzzy_delay = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971542] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] pointer_model = usbtablet {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971707] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] preallocate_images = none {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971865] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] publish_errors = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.971990] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] pybasedir = /opt/stack/nova {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972162] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ram_allocation_ratio = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972319] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rate_limit_burst = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972483] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rate_limit_except_level = CRITICAL {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972667] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rate_limit_interval = 0 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972827] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reboot_timeout = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.972986] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reclaim_instance_interval = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973155] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] record = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973320] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reimage_timeout_per_gb = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973485] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] report_interval = 120 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973644] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rescue_timeout = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973798] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reserved_host_cpus = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.973956] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reserved_host_disk_mb = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974121] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reserved_host_memory_mb = 512 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974279] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] reserved_huge_pages = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974437] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] resize_confirm_window = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974591] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] resize_fs_using_block_device = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974747] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] resume_guests_state_on_host_boot = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.974911] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975081] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] rpc_response_timeout = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975237] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] run_external_periodic_tasks = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975400] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] running_deleted_instance_action = reap {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975556] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975710] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] running_deleted_instance_timeout = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.975897] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler_instance_sync_interval = 120 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976079] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_down_time = 720 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976250] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] servicegroup_driver = db {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976408] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] shell_completion = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976563] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] shelved_offload_time = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976709] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] shelved_poll_interval = 3600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.976871] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] shutdown_timeout = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977036] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] source_is_ipv6 = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977196] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ssl_only = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977442] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977612] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] sync_power_state_interval = 600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977771] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] sync_power_state_pool_size = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.977938] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] syslog_log_facility = LOG_USER {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978104] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] tempdir = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978262] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] timeout_nbd = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978430] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] transport_url = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978587] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] update_resources_interval = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978743] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] use_cow_images = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.978897] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] use_journal = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979063] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] use_json = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979222] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] use_rootwrap_daemon = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979378] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] 
use_stderr = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979527] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] use_syslog = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979678] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vcpu_pin_set = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979839] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plugging_is_fatal = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.979999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plugging_timeout = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.980182] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] virt_mkfs = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.980363] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] volume_usage_poll_interval = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.980521] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] watch_log_file = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.980685] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] web = /usr/share/spice-html5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 517.980871] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.981043] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.981225] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.981414] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_concurrency.disable_process_locking = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.981714] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.981896] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982074] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982245] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982417] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982604] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982791] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.auth_strategy = keystone {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.982953] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.compute_link_prefix = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983315] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.dhcp_domain = novalocal {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983482] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.enable_instance_password = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983641] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.glance_link_prefix = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983799] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.983966] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984139] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.instance_list_per_project_cells = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984298] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.list_records_by_skipping_down_cells = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984459] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.local_metadata_per_cell = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984621] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.max_limit = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984784] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.metadata_cache_expiration = 15 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.984956] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.neutron_default_tenant_id = default {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985199] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.response_validation = warn {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985307] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.use_neutron_default_nets = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985474] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985639] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985800] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.985968] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986146] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_dynamic_targets = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986309] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_jsonfile_path = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986485] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986676] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.backend = dogpile.cache.memcached {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986847] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.backend_argument = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.986993] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.backend_expiration_time = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.987175] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.config_prefix = cache.oslo {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.987345] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.dead_timeout = 60.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.987506] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.debug_cache_backend = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.987663] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.enable_retry_client = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.987834] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.enable_socket_keepalive = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988022] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.enabled = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988184] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.enforce_fips_mode = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988348] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.expiration_time = 600 
{{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988508] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.hashclient_retry_attempts = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988669] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988829] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_dead_retry = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.988982] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_password = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989157] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989318] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989477] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_pool_maxsize = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989633] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989788] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_sasl_enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.989966] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990136] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990322] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.memcache_username = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990491] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.proxies = [] {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990654] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_db = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990812] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_password = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.990982] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991173] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991342] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_server = localhost:6379 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991510] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_socket_timeout = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991667] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.redis_username = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991826] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.retry_attempts = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.991992] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.retry_delay = 0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992168] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.socket_keepalive_count = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992331] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.socket_keepalive_idle = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992497] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.socket_keepalive_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992681] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.tls_allowed_ciphers = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992843] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.tls_cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.992999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.tls_certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.993176] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.tls_enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.993334] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cache.tls_keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.993566] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.993676] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.auth_type = password {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.993834] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994012] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994180] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994345] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994503] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.cross_az_attach = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994660] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.debug = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994815] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.endpoint_template = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.994973] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.http_retries = 3 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995307] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995463] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.os_region_name = RegionOne {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995622] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995776] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cinder.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.995942] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996112] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.cpu_dedicated_set = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996270] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.cpu_shared_set = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996432] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.image_type_exclude_list = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996590] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996749] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.996904] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997071] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997251] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997415] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.resource_provider_association_refresh = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997573] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997732] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.shutdown_retry_interval = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.997908] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998105] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] conductor.workers = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998300] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] console.allowed_origins = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998465] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] console.ssl_ciphers = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998633] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] console.ssl_minimum_version = default {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998796] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] consoleauth.enforce_session_timeout = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.998959] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] consoleauth.token_ttl = 600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999141] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999299] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.certfile = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999460] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999615] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999772] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 517.999927] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000094] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000282] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000448] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000606] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000758] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.000911] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001076] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001243] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.service_type = accelerator {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001403] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001556] env[68638]: DEBUG oslo_service.backend.eventlet.service 
[None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001709] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.001864] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002047] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002209] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] cyborg.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002379] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.asyncio_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002557] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.asyncio_slave_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002743] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.backend = sqlalchemy {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.002916] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003095] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.connection_debug = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003271] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.connection_parameters = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003435] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.connection_recycle_time = 3600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003597] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.connection_trace = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003757] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.db_inc_retry_interval = 
True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.003918] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.db_max_retries = 20 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004092] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.db_max_retry_interval = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004257] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.db_retry_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004417] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.max_overflow = 50 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004578] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.max_pool_size = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004736] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.max_retries = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.004901] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005069] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.mysql_wsrep_sync_wait = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005229] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.pool_timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005418] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.retry_interval = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005540] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.slave_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005696] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.sqlite_synchronous = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.005854] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] database.use_db_reconnect = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
518.006027] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.asyncio_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.006191] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.asyncio_slave_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.006360] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.backend = sqlalchemy {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.006524] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.006686] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.connection_debug = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.006852] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.connection_parameters = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007014] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.connection_recycle_time = 3600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007179] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.connection_trace = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007337] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.db_inc_retry_interval = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007496] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.db_max_retries = 20 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007656] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.db_max_retry_interval = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007812] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.db_retry_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.007968] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.max_overflow = 50 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008163] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.max_pool_size = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008330] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.max_retries = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008500] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008658] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008814] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.pool_timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.008971] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.retry_interval = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.slave_connection = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009300] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] api_database.sqlite_synchronous = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009472] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] devices.enabled_mdev_types = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009647] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009815] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.009974] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ephemeral_storage_encryption.enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010146] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010344] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.api_servers = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010513] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010674] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010834] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.010989] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011166] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011326] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.debug = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011492] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.default_trusted_certificate_ids = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011652] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.enable_certificate_validation = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011810] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.enable_rbd_download = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.011965] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012145] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012302] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012462] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.max_version = None {{(pid=68638) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012640] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012806] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.num_retries = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.012973] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.rbd_ceph_conf = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013147] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.rbd_connect_timeout = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013318] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.rbd_pool = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013486] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.rbd_user = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013653] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013807] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.013964] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.service_type = image {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014303] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014459] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014614] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014770] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.014952] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015122] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.verify_glance_signatures = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015281] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] glance.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015447] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] guestfs.debug = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015614] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015773] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.auth_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.015930] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016101] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016265] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016426] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016579] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016733] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.016893] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.insecure = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017059] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017215] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017372] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017526] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017681] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.017834] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018007] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.service_type = shared-file-system {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018200] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.share_apply_policy_timeout = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018371] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018530] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018687] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.018841] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.019031] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.019199] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] manila.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.019372] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] mks.enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.019712] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.019898] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.manager_interval = 2400 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020077] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.precache_concurrency = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020275] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.remove_unused_base_images = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020457] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020626] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020800] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] image_cache.subdirectory_name = _base {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.020974] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.api_max_retries = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021151] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.api_retry_interval = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021307] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021469] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.auth_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021626] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021780] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.021940] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022111] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.conductor_group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022273] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022433] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022609] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022776] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.022932] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023102] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023262] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023426] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.peer_list = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023580] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023734] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.retriable_status_codes = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.023895] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.serial_console_state_timeout = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024059] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024232] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.service_type = baremetal {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024393] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.shard = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024554] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024710] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.024864] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025030] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025215] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025378] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ironic.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025556] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025730] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] key_manager.fixed_key = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.025910] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026098] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.barbican_api_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026275] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.barbican_endpoint = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026452] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.barbican_endpoint_type = public {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026613] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.barbican_region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026773] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.026928] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027149] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027311] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027474] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027636] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.number_of_retries = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027795] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.retry_delay = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.027956] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.send_service_user_token = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028165] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028348] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028514] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.verify_ssl = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028672] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican.verify_ssl_path = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028837] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.028999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.auth_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.029188] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.029364] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.029528] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.029689] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.029844] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030009] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030176] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] barbican_service_user.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030373] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.approle_role_id = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030538] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.approle_secret_id = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030708] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.kv_mountpoint = secret {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.030866] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.kv_path = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031040] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.kv_version = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031221] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.namespace = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031391] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.root_token_id = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031547] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.ssl_ca_crt_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031712] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.timeout = 60.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.031871] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.use_ssl = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032050] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032252] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032429] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032649] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032827] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.connect_retries = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.032987] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033180] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033360] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033518] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033676] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033830] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.033986] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034191] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034356] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034526] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.service_type = identity {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034686] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034841] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.034997] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.035168] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.035346] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.035504] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] keystone.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.035688] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.ceph_mount_options = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036084] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036275] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.connection_uri = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036443] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_mode = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036610] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036777] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_models = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.036946] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_power_governor_high = performance {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037129] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037290] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_power_management = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037460] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037628] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.device_detach_attempts = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037793] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.device_detach_timeout = 20 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.037958] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.disk_cachemodes = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.038167] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.disk_prefix = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.038353] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.enabled_perf_events = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.038523] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.file_backed_memory = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.038695] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.gid_maps = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.038851] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.hw_disk_discard = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039023] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.hw_machine_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039196] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_rbd_ceph_conf = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039363] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039526] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039696] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_rbd_glance_store_name = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.039865] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040046] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_type = default {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040241] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.images_volume_group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040401] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.inject_key = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040571] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.inject_partition = -2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040733] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.inject_password = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.040896] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.iscsi_iface = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.041067] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.iser_use_multipath = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.041236] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.041454] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.041683] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_downtime = 500 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.041861] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042041] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042230] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_inbound_addr = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042403] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042566] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042723] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_scheme = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.042896] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_timeout_action = abort {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043070] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_tunnelled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043234] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_uri = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043398] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.live_migration_with_native_tls = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043562] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.max_queues = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043727] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.043953] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.044133] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.nfs_mount_options = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.044423] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.044603] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68638) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.044770] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.044932] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.045105] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.045270] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_pcie_ports = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.045438] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.045602] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.pmem_namespaces = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.045759] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.quobyte_client_cfg = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046058] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046273] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046449] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046613] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046777] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rbd_secret_uuid = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.046935] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rbd_user = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047110] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047294] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047447] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rescue_image_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047601] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rescue_kernel_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047754] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rescue_ramdisk_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.047921] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.048090] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.rx_queue_size = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.048264] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.smbfs_mount_options = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.048562] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.048735] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.snapshot_compression = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.048900] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.snapshot_image_format = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049132] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049299] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.sparse_logical_volumes = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049463] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.swtpm_enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049630] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.swtpm_group = tss {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049796] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.swtpm_user = tss {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.049961] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.sysinfo_serial = unique {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.050155] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.tb_cache_size = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.050350] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.tx_queue_size = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.050517] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.uid_maps = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.050676] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.use_virtio_for_bridges = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.050843] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.virt_type = kvm {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051024] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.volume_clear = zero {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051188] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.volume_clear_size = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051351] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.volume_enforce_multipath = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051529] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.volume_use_multipath = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051668] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_cache_path = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.051837] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052008] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052182] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052350] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052642] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052822] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.vzstorage_mount_user = stack {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.052989] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.053203] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.053383] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.auth_type = password {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.053545] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.053706] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.053869] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054049] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054232] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054408] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.default_floating_pool = public {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054571] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054734] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.extension_sync_interval = 600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.054896] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.http_retries = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055069] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055233] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055392] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055560] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055719] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.055886] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.ovs_bridge = br-int {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056062] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.physnets = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056234] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.region_name = RegionOne 
{{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056393] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056559] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.service_metadata_proxy = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056719] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.056884] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.service_type = network {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057053] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057215] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057376] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057538] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057714] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.057872] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] neutron.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058067] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.bdms_in_notifications = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058280] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.default_level = INFO {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058452] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.include_share_mapping = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058629] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.notification_format = unversioned {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058794] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.notify_on_state_change = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.058971] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.059164] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] pci.alias = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.059335] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] pci.device_spec = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.059500] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] pci.report_in_placement = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.059672] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.059843] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.auth_type = password {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060017] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060207] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060375] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060542] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060700] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.connect_retries = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.060858] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061024] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.default_domain_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061198] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.default_domain_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061371] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.domain_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061528] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.domain_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061685] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.endpoint_override = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.061842] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062009] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062202] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062366] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062534] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.password = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062690] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.project_domain_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.062854] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.project_domain_name = Default {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063025] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.project_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063197] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.project_name = service {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063364] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.region_name = RegionOne {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063526] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063684] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.063858] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.service_type = placement {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064030] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064195] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.status_code_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064357] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064514] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.system_scope = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064671] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064828] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.trust_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.064986] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.user_domain_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.065185] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] 
placement.user_domain_name = Default {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.065352] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.user_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.065525] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.username = nova {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.065703] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.065862] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] placement.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066063] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.cores = 20 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066250] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.count_usage_from_placement = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066427] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066591] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.injected_file_content_bytes = 10240 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066753] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.injected_file_path_length = 255 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.066914] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.injected_files = 5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067088] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.instances = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067259] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.key_pairs = 100 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067427] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.metadata_items = 128 {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067587] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.ram = 51200 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067747] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.recheck_quota = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.067909] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.server_group_members = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068094] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.server_groups = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068318] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068497] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] quota.unified_limits_resource_strategy = require {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068668] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068832] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.068989] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.image_metadata_prefilter = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069188] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069359] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.max_attempts = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069521] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.max_placement_results = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069683] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069840] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.069997] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.070233] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] scheduler.workers = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.070423] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.070599] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.070784] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.070955] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071136] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071303] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071469] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071657] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071826] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.071997] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.072179] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.072345] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.072512] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.072684] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.072846] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.isolated_hosts = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073015] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.isolated_images = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073183] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073345] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073505] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073666] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.pci_in_placement = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073823] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.073981] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074153] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074312] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074471] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074629] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074787] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.track_instance_changes = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.074958] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.075143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metrics.required = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.075304] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metrics.weight_multiplier = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.075467] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.075627] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] metrics.weight_setting = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.075940] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076126] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.enabled = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076302] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.port_range = 10000:20000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076475] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076645] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076810] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] serial_console.serialproxy_port = 6083 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.076975] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077160] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.auth_type = password {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077320] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077481] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077682] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077791] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.077946] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.078143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.send_service_user_token = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.078317] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.split_loggers = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.078476] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] service_user.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.078645] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.agent_enabled = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.078806] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079116] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079320] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079492] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.html5proxy_port = 6082 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079654] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.image_compression = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079812] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.jpeg_compression = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.079969] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.playback_compression = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.080145] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.require_secure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.080338] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.server_listen = 127.0.0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.080515] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.080790] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.080958] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.streaming_mode = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] spice.zlib_compression = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] upgrade_levels.baseapi = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] upgrade_levels.compute = auto {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] upgrade_levels.conductor = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] upgrade_levels.scheduler = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082144] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082358] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082459] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082555] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082719] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.082882] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083049] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083213] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vendordata_dynamic_auth.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083386] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.api_retry_count = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083545] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.ca_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083715] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.083879] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.cluster_name = testcl1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084049] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.connection_pool_size = 10 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084210] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.console_delay_seconds = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084378] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.datastore_regex = ^datastore.* {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084583] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084751] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.host_password = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.084921] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.host_port = 443 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085102] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.host_username = administrator@vsphere.local {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085276] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.insecure = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085442] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.integration_bridge = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085639] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.maximum_objects = 100 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085832] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.pbm_default_policy = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.085999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.pbm_enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.086173] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.pbm_wsdl_location = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.086379] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.086543] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.serial_port_proxy_uri = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.086703] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.serial_port_service_uri = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.086872] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.task_poll_interval = 0.5 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087052] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.use_linked_clone = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087226] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.vnc_keymap = en-us {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087395] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.vnc_port = 5900 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087557] env[68638]: DEBUG 
oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vmware.vnc_port_total = 10000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087777] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.auth_schemes = ['none'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.087911] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.088212] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.088400] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.088569] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.novncproxy_port = 6080 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.088757] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.server_listen = 127.0.0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.088935] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089108] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.vencrypt_ca_certs = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089269] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.vencrypt_client_cert = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089426] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vnc.vencrypt_client_key = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089605] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089771] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_deep_image_inspection = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.089928] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090098] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090285] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090458] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.disable_rootwrap = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090620] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.enable_numa_live_migration = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090780] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.090938] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.091111] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.091298] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.libvirt_disable_apic = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.091614] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.091788] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.091954] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092133] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092301] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092463] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092659] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092781] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.092940] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093115] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093299] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093472] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.client_socket_timeout = 900 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093637] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.default_pool_size = 1000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093800] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.keep_alive = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.093965] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.max_header_line = 16384 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094139] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094302] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.ssl_ca_file = None 
{{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094462] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.ssl_cert_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094620] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.ssl_key_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094784] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.tcp_keepidle = 600 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.094956] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.095134] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] zvm.ca_file = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.095291] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] zvm.cloud_connector_url = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.095582] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.095755] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] zvm.reachable_timeout = 300 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.095922] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096111] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096292] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.connection_string = messaging:// {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096488] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.enabled = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096624] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] 
profiler.es_doc_type = notification {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096787] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.es_scroll_size = 10000 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.096953] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.es_scroll_time = 2m {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097131] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.filter_error_trace = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097300] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.hmac_keys = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097471] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.sentinel_service_name = mymaster {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097635] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.socket_timeout = 0.1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097795] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.trace_requests = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.097954] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler.trace_sqlalchemy = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098165] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler_jaeger.process_tags = {} {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098340] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler_jaeger.service_name_prefix = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098508] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] profiler_otlp.service_name_prefix = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098670] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] remote_debug.host = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098828] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] remote_debug.port = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.098999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099176] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099338] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099500] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099657] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099814] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.099971] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.100143] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.100337] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.100515] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.100675] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.100846] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101018] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101196] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101379] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101551] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101714] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.101875] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102056] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102225] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102388] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102552] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102752] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.102876] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103044] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68638) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103211] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103374] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103536] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103696] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.103858] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104028] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104204] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104377] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104539] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104705] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.104872] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105040] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105227] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105396] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_notifications.retry = -1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105571] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105739] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.105917] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.auth_section = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106087] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.auth_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106248] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.cafile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106405] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.certfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106596] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.collect_timing = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106714] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.connect_retries = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.106867] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.connect_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107030] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_id = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107204] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107367] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_override = 
None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107516] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107669] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107824] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.endpoint_service_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.107999] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.insecure = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108166] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.keyfile = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108336] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.max_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108492] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.min_version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108647] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.region_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108803] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.retriable_status_codes = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.108959] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.service_name = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109124] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.service_type = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109285] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.split_loggers = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109443] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.status_code_retries = None {{(pid=68638) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109600] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.status_code_retry_delay = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109755] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.timeout = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.109909] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.valid_interfaces = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110073] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_limit.version = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110268] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_reports.file_event_handler = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110445] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110603] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] oslo_reports.log_dir = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110771] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.110928] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.111098] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.111285] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.111459] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.111626] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.111860] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112045] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112214] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112382] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112543] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112701] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] vif_plug_ovs_privileged.user = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.112868] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113053] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113233] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113403] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113570] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113739] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.113903] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114074] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114256] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114430] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.isolate_vif = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114602] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114768] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.114933] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.115117] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.115280] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] os_vif_ovs.per_port_bridge = False {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.115504] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.capabilities = [21] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.115685] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.115845] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.helper_command = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116015] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116217] env[68638]: DEBUG oslo_service.backend.eventlet.service [None 
req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116383] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] privsep_osbrick.user = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116556] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116715] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.group = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.116870] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.helper_command = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.117046] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.117215] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.117379] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] nova_sys_admin.user = None {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 518.117508] env[68638]: DEBUG oslo_service.backend.eventlet.service [None req-19e2c4c7-ea10-4e12-a395-f591f1bee7c3 None None] ******************************************************************************** {{(pid=68638) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 518.117920] env[68638]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 518.621489] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Getting list of instances from cluster (obj){ [ 518.621489] env[68638]: value = "domain-c8" [ 518.621489] env[68638]: _type = "ClusterComputeResource" [ 518.621489] env[68638]: } {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 518.622617] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9870e1c4-ce2a-47ee-a491-5f487862143e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.631469] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Got total of 0 instances {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 518.632043] env[68638]: WARNING nova.virt.vmwareapi.driver [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 518.632538] env[68638]: INFO nova.virt.node [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Generated node identity a03d7c1f-9953-43da-98b9-91e5cea1f9ff [ 518.632752] env[68638]: INFO nova.virt.node [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Wrote node identity a03d7c1f-9953-43da-98b9-91e5cea1f9ff to /opt/stack/data/n-cpu-1/compute_id [ 519.135790] env[68638]: WARNING nova.compute.manager [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Compute nodes ['a03d7c1f-9953-43da-98b9-91e5cea1f9ff'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 520.142274] env[68638]: INFO nova.compute.manager [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 521.148037] env[68638]: WARNING nova.compute.manager [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 521.148448] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 521.148514] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 521.148648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 521.148802] env[68638]: DEBUG nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 521.149738] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ad4c52-9373-4125-bc2c-537d2b466ed0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.158103] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3bf65a-e1e5-4dcc-b541-8e188341babf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.172567] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd38de4-2cc5-4a04-9af1-eeb886a5c76f {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.178838] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e32942c-9d80-4279-b1ae-c94d877c257a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.207230] env[68638]: DEBUG nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181081MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 521.207375] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 521.207575] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 521.710583] env[68638]: WARNING nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] No compute node record for cpu-1:a03d7c1f-9953-43da-98b9-91e5cea1f9ff: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host a03d7c1f-9953-43da-98b9-91e5cea1f9ff could not be found. [ 522.214309] env[68638]: INFO nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: a03d7c1f-9953-43da-98b9-91e5cea1f9ff [ 523.723212] env[68638]: DEBUG nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 523.723687] env[68638]: DEBUG nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 523.877699] env[68638]: INFO nova.scheduler.client.report [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] [req-f4d1efa2-ea3d-4fde-9504-46ec6dc68c2f] Created resource provider record via placement API for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
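[editor's note] The records above show the resource tracker registering this host and creating a resource provider in Placement; the records that follow push the VCPU/MEMORY_MB/DISK_GB inventory for it. As a minimal sketch only, the snippet below mirrors the inventory payload shape visible in the log and sends it with the `requests` library against the Placement REST API; the endpoint URL, token, and microversion header are assumptions for illustration, and Nova itself does this through nova.scheduler.client.report rather than raw HTTP.

    # Illustrative sketch: payload shape taken from the inventory dict logged above.
    import requests

    PLACEMENT = "http://placement.example.test/placement"  # hypothetical endpoint
    RP_UUID = "a03d7c1f-9953-43da-98b9-91e5cea1f9ff"        # provider UUID from the log

    inventories = {
        "VCPU":      {"total": 48,     "reserved": 0,   "min_unit": 1,
                      "max_unit": 16,    "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "min_unit": 1,
                      "max_unit": 170,   "step_size": 1, "allocation_ratio": 1.0},
    }

    resp = requests.put(
        f"{PLACEMENT}/resource_providers/{RP_UUID}/inventories",
        json={"resource_provider_generation": 0, "inventories": inventories},
        headers={"X-Auth-Token": "<token>",                   # placeholder credential
                 "OpenStack-API-Version": "placement 1.39"},  # assumed microversion
        timeout=10,
    )
    resp.raise_for_status()

Each successful inventory or trait update bumps the provider generation, which is why the log shows the generation moving from 0 to 1 (update_inventory) and then 1 to 2 (update_traits).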
[ 523.897251] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b659b5e-3f7d-4628-9d35-5bf3595c78f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.906103] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff031a5-849b-4112-bc82-d0b5dbec2a38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.940171] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ac7372-b06b-4f9c-80ec-676be0305d62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.948461] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a36053-1a12-4dfe-9c66-61b1da257412 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.963440] env[68638]: DEBUG nova.compute.provider_tree [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.501975] env[68638]: DEBUG nova.scheduler.client.report [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 524.502216] env[68638]: DEBUG nova.compute.provider_tree [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 0 to 1 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 524.502352] env[68638]: DEBUG nova.compute.provider_tree [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 524.551016] env[68638]: DEBUG nova.compute.provider_tree [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Updating 
resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 1 to 2 during operation: update_traits {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 525.056396] env[68638]: DEBUG nova.compute.resource_tracker [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 525.056396] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.847s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 525.056396] env[68638]: DEBUG nova.service [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Creating RPC server for service compute {{(pid=68638) start /opt/stack/nova/nova/service.py:186}} [ 525.068477] env[68638]: DEBUG nova.service [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] Join ServiceGroup membership for this service compute {{(pid=68638) start /opt/stack/nova/nova/service.py:203}} [ 525.068690] env[68638]: DEBUG nova.servicegroup.drivers.db [None req-e2feb8c5-0120-4e96-9f40-0d97f10a5cbf None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68638) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 563.072695] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_power_states {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 563.465426] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.465540] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.580332] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Getting list of instances from cluster (obj){ [ 563.580332] env[68638]: value = "domain-c8" [ 563.580332] env[68638]: _type = "ClusterComputeResource" [ 563.580332] env[68638]: } {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 563.581695] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3c7b93-690e-4ecd-9d97-7f9a22b39e16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.595670] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] 
Got total of 0 instances {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 563.595670] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 563.596434] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Getting list of instances from cluster (obj){ [ 563.596434] env[68638]: value = "domain-c8" [ 563.596434] env[68638]: _type = "ClusterComputeResource" [ 563.596434] env[68638]: } {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 563.597494] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9313364-f1cd-4247-832b-3b338242d083 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.605503] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Got total of 0 instances {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 563.970725] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 564.530207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 564.530207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 564.534653] env[68638]: INFO nova.compute.claims [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.599705] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3531460-800b-43f7-bd8f-cd4a70c7a034 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.612558] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998ad43d-9875-4a82-8db8-da0b1976aa1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.652415] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8669bc88-5150-49e5-a6d8-5fe1b9798b58 
{{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.663780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7bceba-ff97-4228-9dc0-8edfa2515418 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.681093] env[68638]: DEBUG nova.compute.provider_tree [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 566.114944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 566.115363] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 566.184794] env[68638]: DEBUG nova.scheduler.client.report [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 566.619360] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 566.690299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.161s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 566.692064] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 567.153516] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 567.153516] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 567.156696] env[68638]: INFO nova.compute.claims [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 567.197108] env[68638]: DEBUG nova.compute.utils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 567.199606] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 567.202168] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 567.478274] env[68638]: DEBUG nova.policy [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c89e1210c3fb46d6b617655fac7c6d53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f54818a99fac4274befb43a064c49c31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 567.709259] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 568.259105] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf4f799-a174-4a19-ada8-b397d0fb220b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.273037] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6436b99-8959-40e9-ab17-be1a01df505f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.305410] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f574c447-1125-43f9-8740-490196bfd869 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.313061] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6884716-d112-45b7-bc43-567e890c89a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.333942] env[68638]: DEBUG nova.compute.provider_tree [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.612218] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 568.612444] env[68638]: DEBUG 
oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 568.723729] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 568.760134] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 568.760482] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.760549] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 568.760705] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.760902] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 568.761985] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
568.763101] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 568.763101] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 568.763269] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 568.763414] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 568.763601] env[68638]: DEBUG nova.virt.hardware [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 568.764524] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ac658e-9e6b-4328-8369-b996683f6739 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.780573] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76965e2-03d5-444e-8c2f-d3baaab84b01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.800750] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f7174b-9fbd-4a79-9424-cd8b04676b04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.836041] env[68638]: DEBUG nova.scheduler.client.report [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 568.875244] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 
tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 568.875539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 568.914392] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Successfully created port: ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.116669] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 569.342187] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 569.342820] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 569.380657] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 569.649967] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.650382] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 569.652356] env[68638]: INFO nova.compute.claims [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 569.844628] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.844960] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 569.847190] env[68638]: DEBUG nova.compute.utils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 569.850493] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Not allocating networking since 'none' was specified. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 569.910529] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 570.353459] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 570.364254] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 570.758788] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1451ef-384d-457a-b5ac-d6bf2f9c77b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.768544] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6604c2c5-54e9-4de9-b40b-c3d8e5f1d4fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.806801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5915b06d-f5d7-4d4f-a566-70d3dc9425e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.814794] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b097b11-a2e4-4f25-b5c7-d8eafda8ef6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.830612] env[68638]: DEBUG nova.compute.provider_tree [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.890970] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 571.332476] env[68638]: DEBUG nova.scheduler.client.report [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 571.380545] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 571.423432] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 571.423673] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.423820] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 571.423990] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.424838] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 571.425580] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 571.426099] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 
tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 571.426099] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 571.426246] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 571.426357] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 571.426524] env[68638]: DEBUG nova.virt.hardware [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 571.427807] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cc57fb-0efd-49d6-9839-a3201a5b6986 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.438491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef6845e-dbe5-4daa-8f03-d1f22e979bee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.455817] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.466194] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.466944] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51114a32-328a-43d8-b5e1-8aab0bb6147e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.481227] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Created folder: OpenStack in parent group-v4. 
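Editor's note: the folder-creation and VM-creation entries around this point (Invoking Folder.CreateFolder, Invoking Folder.CreateVM_Task, then polling task-2832938) all go through the oslo.vmware session shown earlier in the log. The following is a minimal, hypothetical sketch of that call pattern; the function name, the managed-object references, and the commented-out credentials are illustrative assumptions, not values taken from this log.

    from oslo_vmware import api as vmware_api


    def create_instance_vm(session, parent_folder_ref, resource_pool_ref, config_spec):
        """Create an Instances folder and a VM under it, blocking on the task."""
        # Folder.CreateFolder is invoked against a parent folder managed object,
        # analogous to the group-v569734 / group-v569735 calls logged above.
        instances_folder = session.invoke_api(
            session.vim, 'CreateFolder', parent_folder_ref, name='Instances')

        # CreateVM_Task returns a task object; wait_for_task polls it until it
        # finishes, which is what the "Task: {'id': task-..., 'name':
        # CreateVM_Task} progress is 5%." entries correspond to.
        task = session.invoke_api(
            session.vim, 'CreateVM_Task', instances_folder,
            config=config_spec, pool=resource_pool_ref)
        task_info = session.wait_for_task(task)
        return task_info.result  # managed object reference of the new VM

    # Usage sketch (assumed host/credentials; a real run needs a reachable vCenter):
    # session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                       api_retry_count=3, task_poll_interval=0.5)
    # vm_ref = create_instance_vm(session, project_folder_ref, pool_ref, config_spec)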
[ 571.482201] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating folder: Project (d66b0c5fc457471a8b7d4569e8395a92). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.482201] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6036420-94f6-4fd6-8f72-3b36e72dd1c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.492320] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Created folder: Project (d66b0c5fc457471a8b7d4569e8395a92) in parent group-v569734. [ 571.492517] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating folder: Instances. Parent ref: group-v569735. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.492756] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67a007df-f455-418c-8433-818b0e99e9e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.501011] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Created folder: Instances in parent group-v569735. [ 571.501267] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 571.501446] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 571.501635] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-521a2838-ec70-426d-bbce-53c307ce6e97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.519324] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.519324] env[68638]: value = "task-2832938" [ 571.519324] env[68638]: _type = "Task" [ 571.519324] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.537681] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832938, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.839309] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 571.839814] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 571.844850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.935s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 571.846302] env[68638]: INFO nova.compute.claims [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.923046] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Successfully updated port: ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 572.034572] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832938, 'name': CreateVM_Task, 'duration_secs': 0.302387} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.034877] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.037237] env[68638]: DEBUG oslo_vmware.service [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a8d929-b5a7-44f7-a04d-9e2b6e7bb6c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.051209] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.051209] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.051209] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 572.051209] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767d0131-a846-4b76-ac95-b7618d1fb529 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.056309] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 572.056309] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bed362-6ffc-8d48-a983-997729e68575" [ 572.056309] env[68638]: _type = "Task" [ 572.056309] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.065213] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bed362-6ffc-8d48-a983-997729e68575, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.347027] env[68638]: DEBUG nova.compute.utils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 572.349671] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 572.349671] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 572.429773] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.429927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.430090] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.476997] env[68638]: DEBUG nova.policy [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5d2c4fd7883042f6af47375618176d81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f86fe0709e74230a2688619955c9483', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 572.572145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.573236] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 
tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 572.573236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.573692] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.574210] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.574812] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac5b2b0e-b68d-49f1-b5cc-6d2326f4520c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.598014] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.598669] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.599978] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b66c037-7b38-4dd1-a74f-caf1865e9d8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.609488] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a957452-b624-41b2-8823-5fa56c5de27a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.617850] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 572.617850] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52390c55-012f-3152-877f-6159112706d2" [ 572.617850] env[68638]: _type = "Task" [ 572.617850] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.626668] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52390c55-012f-3152-877f-6159112706d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.861459] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 572.994952] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e11456-7c04-462e-aa08-93ea3348f24c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.002525] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45ddd88-6681-4128-9563-0cb30f7155d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.048683] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7f26f9-cc8b-4029-ac48-474fd901aedc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.056408] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15162f4d-0120-42ec-b396-b1982b1e0c08 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.070160] env[68638]: DEBUG nova.compute.provider_tree [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.115393] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 573.134780] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 573.135660] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating directory with path [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.135660] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fad274a4-3902-42e4-9371-e200fd7186d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.158197] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Created directory with path [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.161113] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Fetch image to [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 573.161113] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Downloading image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk on the data store datastore1 {{(pid=68638) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 573.161113] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30252d15-f7bb-455d-a116-2c18c72d04b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.168845] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598fae32-e580-477f-a13e-ce4a3de16fc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.181051] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75e4f1b-da57-4940-920c-1742b5512459 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.220366] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0665b43b-86df-4c28-97ce-f113139dd380 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.226550] env[68638]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b5b4db51-2b1a-4502-b1e5-cbbee7d4523c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.239142] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Successfully created port: bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 573.320462] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Downloading image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to the data store datastore1 {{(pid=68638) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 573.349751] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.353922] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.354344] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.354517] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.354787] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.354872] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.355069] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.355287] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 573.355501] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 573.400886] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 573.573955] env[68638]: DEBUG nova.scheduler.client.report [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 573.755463] env[68638]: DEBUG nova.network.neutron [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Updating instance_info_cache with network_info: [{"id": "ddb66c99-6c88-4e52-a360-3b2778771361", "address": "fa:16:3e:a0:e8:b4", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb66c99-6c", "ovs_interfaceid": "ddb66c99-6c88-4e52-a360-3b2778771361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.798228] env[68638]: DEBUG nova.compute.manager [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Received event 
network-vif-plugged-ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 573.798573] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] Acquiring lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.798818] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.799117] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 573.799345] env[68638]: DEBUG nova.compute.manager [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] No waiting events found dispatching network-vif-plugged-ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.799544] env[68638]: WARNING nova.compute.manager [req-7e6ece75-2776-4778-b200-e966fbeb1803 req-32c40288-9243-426e-a885-99426b32ae88 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Received unexpected event network-vif-plugged-ddb66c99-6c88-4e52-a360-3b2778771361 for instance with vm_state building and task_state spawning. [ 573.858788] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.879296] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 573.918112] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 573.918531] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.918779] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 573.918993] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.919166] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 573.919358] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 573.919631] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 573.919793] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 573.920014] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c 
tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 573.920342] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 573.920645] env[68638]: DEBUG nova.virt.hardware [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 573.922169] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6422b0f4-d7b4-4795-a939-b7916d1c55eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.937040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50da5ed0-695f-4371-a280-940a196db28d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.081113] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 574.081836] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 574.091139] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.199s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 574.092113] env[68638]: INFO nova.compute.claims [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.099581] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Completed reading data from the image iterator. 
{{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 574.099788] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 574.242519] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Downloaded image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk on the data store datastore1 {{(pid=68638) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 574.242790] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 574.242981] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Copying Virtual Disk [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk to [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.243781] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87396b4a-4895-47c9-926a-9ab26547056a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.251015] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 574.251015] env[68638]: value = "task-2832939" [ 574.251015] env[68638]: _type = "Task" [ 574.251015] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.263454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.263769] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Instance network_info: |[{"id": "ddb66c99-6c88-4e52-a360-3b2778771361", "address": "fa:16:3e:a0:e8:b4", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb66c99-6c", "ovs_interfaceid": "ddb66c99-6c88-4e52-a360-3b2778771361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 574.264369] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.264486] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:e8:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddb66c99-6c88-4e52-a360-3b2778771361', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 574.278637] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Creating folder: Project (f54818a99fac4274befb43a064c49c31). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 574.279355] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f807f0cb-3b6b-4334-9073-8bdaef46e856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.288757] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Created folder: Project (f54818a99fac4274befb43a064c49c31) in parent group-v569734. [ 574.288984] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Creating folder: Instances. Parent ref: group-v569738. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 574.289249] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-213a4ec4-391a-4db6-9853-225df81962f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.298975] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Created folder: Instances in parent group-v569738. [ 574.299274] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 574.299475] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 574.300515] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1339e6a7-f094-4142-82d3-6e9f307aee2b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.319765] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 574.319765] env[68638]: value = "task-2832942" [ 574.319765] env[68638]: _type = "Task" [ 574.319765] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.329462] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832942, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.602915] env[68638]: DEBUG nova.compute.utils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 574.618123] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 574.618123] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 574.765401] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832939, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.786335] env[68638]: DEBUG nova.policy [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdfdcc628e4e40b586b0b71bc0ed5b19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d2c1dcc55dd42c5b791dd8f1841479b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 574.832476] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832942, 'name': CreateVM_Task, 'duration_secs': 0.455757} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.832710] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 574.847126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.847126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 574.847126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 574.847372] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb5f910-093a-41d2-8fbe-3a3df9fe3d68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.854705] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 574.854705] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f3da5e-215b-447a-8cd9-c10f4913c20f" [ 574.854705] env[68638]: _type = "Task" [ 574.854705] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.864643] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f3da5e-215b-447a-8cd9-c10f4913c20f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.132066] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 575.203561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "e3cf739a-3104-473d-af66-d9974ed1a222" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.203998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.243940] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3374f35b-92ed-4f5e-9990-0a4376352e50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.252227] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c960410d-8cbf-4380-bcd8-f2a53ee8519b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.296723] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a369e6d0-ff5b-439a-9e47-4c6ef2f2a388 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.307367] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71314} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.307367] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Copied Virtual Disk [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk to [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 575.307367] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleting the datastore file [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 575.307367] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb59a2b4-34ee-41aa-8bd5-558e08a2db9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.310836] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e544bb9-2af2-4c38-ae65-579f013cab80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.325606] env[68638]: DEBUG nova.compute.provider_tree [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.329524] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 575.329524] env[68638]: value = "task-2832943" [ 575.329524] env[68638]: _type = "Task" [ 575.329524] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.341956] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832943, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.349291] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Successfully created port: cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 575.363986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 575.364263] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 575.364476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.675606] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Successfully updated port: bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 575.708477] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 575.833567] env[68638]: DEBUG nova.scheduler.client.report [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 575.849014] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024773} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.849378] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 575.855701] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Moving file from [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9. {{(pid=68638) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 575.855701] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-82db17c9-b7e3-4119-a495-1b2c04b089e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.863947] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 575.863947] env[68638]: value = "task-2832944" [ 575.863947] env[68638]: _type = "Task" [ 575.863947] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.873331] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832944, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.142930] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 576.179686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.179686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquired lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.179686] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.235629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.290314] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 576.290559] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 576.290711] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 576.290888] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 
tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 576.291074] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 576.291239] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 576.292307] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 576.292307] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 576.292307] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 576.292307] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 576.292307] env[68638]: DEBUG nova.virt.hardware [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 576.292946] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7881a346-4e44-4818-8c1f-b0d46d3c5a44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.305019] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b06e10-bfe9-4935-84a0-715520f0ffd6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.339743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.340266] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 576.342743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.484s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.342909] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.343065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 576.343337] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.108s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.344721] env[68638]: INFO nova.compute.claims [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.347612] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b75550-a3da-4880-bf27-925d2cf2da5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.355717] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ac1478-b352-4802-bd20-75cab9fb976b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.375427] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e34867-9b85-4b62-a16e-33cd1ebf79f2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.386991] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eaff7c2-41d3-450f-8d10-c500e89ee877 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.389676] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 
tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832944, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.025641} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.390560] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] File moved {{(pid=68638) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 576.390560] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Cleaning up location [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 576.390560] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleting the datastore file [datastore1] vmware_temp/895a1541-1f11-4890-8fde-e4dd446c6519 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 576.392279] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe4eb0d4-ea38-43e4-9a19-3e8a80ed0733 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.418878] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181090MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 576.419153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 576.425552] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 576.425552] env[68638]: value = "task-2832945" [ 576.425552] env[68638]: _type = "Task" [ 576.425552] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.435511] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832945, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.849523] env[68638]: DEBUG nova.compute.utils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 576.850966] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 576.851158] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 576.935335] env[68638]: DEBUG nova.policy [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34d3f08ca4c44eecb3238404c3728f0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e92752f6508d4e0eae7e29247444a38f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 576.942621] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025473} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.942855] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 576.943604] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d61b434-e763-415e-a44e-b76b43623756 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.946430] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.951277] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 576.951277] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5296c95d-2b47-6cd6-ae12-6a02019fb379" [ 576.951277] env[68638]: _type = "Task" [ 576.951277] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.959343] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5296c95d-2b47-6cd6-ae12-6a02019fb379, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.303776] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Received event network-changed-ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 577.304017] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Refreshing instance network info cache due to event network-changed-ddb66c99-6c88-4e52-a360-3b2778771361. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 577.304910] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Acquiring lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.305121] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Acquired lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 577.305299] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Refreshing network info cache for port ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 577.341192] env[68638]: DEBUG nova.network.neutron [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Updating instance_info_cache with network_info: [{"id": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "address": "fa:16:3e:41:77:17", "network": {"id": "e3ba6b79-0a0f-4f1b-815d-91c564b9aa96", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1103305689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f86fe0709e74230a2688619955c9483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcca1912-c5", "ovs_interfaceid": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.356500] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 577.469780] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5296c95d-2b47-6cd6-ae12-6a02019fb379, 'name': SearchDatastore_Task, 'duration_secs': 0.010437} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.470063] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.470488] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc/4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 577.470650] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 577.470857] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 577.471314] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1b44cb7-0b6e-4760-b316-c8e5f46f4dd5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.474879] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cae45d34-0b94-4353-a3a9-743d2892b688 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.484316] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 577.484316] env[68638]: value = "task-2832946" [ 577.484316] env[68638]: _type = "Task" [ 577.484316] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.491512] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 577.491818] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 577.493086] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ab85c2-ecd6-40a2-80de-4e1f703f5344 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.500803] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.501896] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba39639-bd2a-467f-8c9a-0f246ad2443f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.504988] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 577.504988] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528e5056-9ec5-c9e3-784a-fedd406d6cb4" [ 577.504988] env[68638]: _type = "Task" [ 577.504988] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.512020] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c91e9c-00ae-4d9c-b51b-8fdf6b00dbe8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.518469] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528e5056-9ec5-c9e3-784a-fedd406d6cb4, 'name': SearchDatastore_Task, 'duration_secs': 0.008464} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.519525] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-681bba09-ca0d-4e20-bb7c-996aecd475a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.558456] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6918d42c-15b6-482a-aead-c2d01deb1a08 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.563114] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 577.563114] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526aebba-75e1-d776-3517-5b1d5d5d0609" [ 577.563114] env[68638]: _type = "Task" [ 577.563114] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.570419] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6664c1ee-09e8-454d-84d3-e9f3d81be6ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.577482] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526aebba-75e1-d776-3517-5b1d5d5d0609, 'name': SearchDatastore_Task, 'duration_secs': 0.008381} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.578439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.578439] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944/05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 577.579113] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61ab1af0-0256-4c9a-b4d9-f8f4c0c5a913 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.588628] env[68638]: DEBUG nova.compute.provider_tree [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.590549] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Successfully created port: 41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 577.594040] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Successfully updated port: cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 577.598029] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 577.598029] env[68638]: value = "task-2832947" [ 577.598029] env[68638]: _type = "Task" [ 577.598029] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.605694] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.853031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Releasing lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.853031] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Instance network_info: |[{"id": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "address": "fa:16:3e:41:77:17", "network": {"id": "e3ba6b79-0a0f-4f1b-815d-91c564b9aa96", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1103305689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f86fe0709e74230a2688619955c9483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcca1912-c5", "ovs_interfaceid": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 577.853304] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:77:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcca1912-c5f3-4a93-a224-b3707bd3c2b0', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.868587] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Creating folder: Project (0f86fe0709e74230a2688619955c9483). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 577.876834] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9c4e266-1688-48e8-83c9-abc0335904f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.891527] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Created folder: Project (0f86fe0709e74230a2688619955c9483) in parent group-v569734. 
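The "Waiting for the task ... to complete" and "progress is N%" pairs in the entries above come from oslo_vmware polling a vCenter task object (CopyVirtualDisk_Task, CreateFolder, CreateVM_Task) until it reaches a terminal state. The snippet below is only a minimal sketch of that polling pattern; get_task_info, the state names, and the interval/timeout defaults are illustrative assumptions, not the real oslo.vmware API.

import time

class TaskFailed(Exception):
    """Raised when the backend reports the polled task as failed."""

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    # get_task_info is a caller-supplied callable (an assumption for this
    # sketch) returning e.g. {'state': 'running', 'progress': 40}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        state = info.get('state')
        if state == 'success':
            return info                       # task completed successfully
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # mirrors the "Task: {...} progress is N%" debug lines in the log
        print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
        time.sleep(interval)
    raise TimeoutError('task %s did not complete within %.0fs' % (task_ref, timeout))

In the log the same idea is driven by a looping call rather than a bare sleep loop, and the real task info comes from the vSphere API; the values used here are arbitrary placeholders.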
[ 577.891728] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Creating folder: Instances. Parent ref: group-v569741. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 577.893863] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f110701e-5b4e-4615-8f21-70ca83e97ea4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.904645] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Created folder: Instances in parent group-v569741. [ 577.906152] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 577.906152] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 577.906152] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c3dff05c-49a5-4b9a-8e18-ef4cc8753082 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.928692] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.929035] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.934756] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.934756] env[68638]: value = "task-2832950" [ 577.934756] env[68638]: _type = "Task" [ 577.934756] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.941819] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832950, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.011932] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832946, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516595} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.011932] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc/4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 578.011932] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 578.011932] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9292a83-4e5a-4d76-ab84-c8dd51da2298 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.016994] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 578.016994] env[68638]: value = "task-2832951" [ 578.016994] env[68638]: _type = "Task" [ 578.016994] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.033025] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832951, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.096288] env[68638]: DEBUG nova.scheduler.client.report [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 578.100657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.100657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.100657] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.124068] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832947, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.381657] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 578.414845] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 578.415391] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.415391] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 578.415544] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.415598] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 578.415689] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 578.415939] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 578.420822] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 578.420822] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c 
tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 578.420822] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 578.420822] env[68638]: DEBUG nova.virt.hardware [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 578.424389] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4c6fc0-99cc-431e-b1c6-e0089edf21f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.435715] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 578.442909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6efc8ae-7d1b-42ec-8184-8ada4c40967f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.460317] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832950, 'name': CreateVM_Task, 'duration_secs': 0.403903} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.460473] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 578.461160] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.461322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.461621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 578.461858] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ef4431b-34e3-44d5-acfd-5bd6d24ff22c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.467452] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 578.467452] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52841a63-11b9-0b07-1355-65255afaaf75" [ 578.467452] env[68638]: _type = "Task" [ 578.467452] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.479436] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52841a63-11b9-0b07-1355-65255afaaf75, 'name': SearchDatastore_Task, 'duration_secs': 0.009432} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.479712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.479936] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.480172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.480311] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.480475] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 578.480717] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-591af495-8eaa-4fcd-82a6-23a3167ac411 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.489425] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 578.489602] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 578.490478] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-119c3078-bf72-4ccd-823c-4c7ae9c2456c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.497021] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 578.497021] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bf5380-3e16-38ef-1af8-b77ebf4de66c" [ 578.497021] env[68638]: _type = "Task" [ 578.497021] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.498088] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Updated VIF entry in instance network info cache for port ddb66c99-6c88-4e52-a360-3b2778771361. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 578.498449] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Updating instance_info_cache with network_info: [{"id": "ddb66c99-6c88-4e52-a360-3b2778771361", "address": "fa:16:3e:a0:e8:b4", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb66c99-6c", "ovs_interfaceid": "ddb66c99-6c88-4e52-a360-3b2778771361", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.514021] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bf5380-3e16-38ef-1af8-b77ebf4de66c, 'name': SearchDatastore_Task, 'duration_secs': 0.008726} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.514021] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fa63275-60a0-47b9-9567-f21c5974d3e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.519010] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 578.519010] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5215df2f-4fe5-54b6-cce3-6afeb831d9dd" [ 578.519010] env[68638]: _type = "Task" [ 578.519010] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.531026] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139274} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.535643] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 578.536590] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5215df2f-4fe5-54b6-cce3-6afeb831d9dd, 'name': SearchDatastore_Task, 'duration_secs': 0.00845} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.537477] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c0510-501d-413c-9b7e-93456010fea1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.540042] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.540310] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 54af9c38-c8b6-4ef9-be63-de545dcc0da5/54af9c38-c8b6-4ef9-be63-de545dcc0da5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 578.540556] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea6e8338-5e4c-4032-b203-59c4b0b26a53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.566439] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc/4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 578.569588] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e410177-74a0-4321-94f7-7820f978e07d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.583672] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 578.583672] env[68638]: value = "task-2832952" [ 578.583672] env[68638]: _type = "Task" [ 578.583672] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.591983] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.592128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.596098] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 578.596098] env[68638]: value = "task-2832953" [ 578.596098] env[68638]: _type = "Task" [ 578.596098] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.603247] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832952, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.610242] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832953, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.613213] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.615026] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 578.624458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.205s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.625960] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.78376} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.626901] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944/05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 578.629035] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 578.629035] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34b8317d-0607-4f59-983c-992b808e15e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.635180] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 578.635180] env[68638]: value = "task-2832954" [ 578.635180] env[68638]: _type = "Task" [ 578.635180] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.650582] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832954, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.721822] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.975866] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.008203] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Releasing lock "refresh_cache-05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.008203] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Received event network-vif-plugged-bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 579.008357] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Acquiring lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.008620] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.008786] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.009528] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] No waiting events found dispatching network-vif-plugged-bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 579.009528] env[68638]: WARNING nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Received unexpected event network-vif-plugged-bcca1912-c5f3-4a93-a224-b3707bd3c2b0 for instance with vm_state building and task_state spawning. 
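The "<uuid>-events" lock messages and the WARNING about an unexpected network-vif-plugged event above reflect per-instance external-event bookkeeping: a waiter registers for a named event before triggering the operation, and an incoming Neutron notification pops the matching waiter if one exists. The sketch below is a simplified stand-in loosely modelled on that behaviour, not Nova's actual InstanceEvents class; the class name, method names, and the threading.Event-based waiters are assumptions for illustration.

import threading
from collections import defaultdict

class InstanceEventTable:
    # Simplified stand-in for the per-instance event bookkeeping suggested by
    # the "<uuid>-events" lock messages; names and structure are illustrative.
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare_for_event(self, instance_uuid, event_name):
        # Register interest before starting the operation that will emit the
        # event, so a later external notification has something to wake up.
        with self._lock:                    # analogue of acquiring "<uuid>-events"
            waiter = threading.Event()
            self._waiters[instance_uuid][event_name] = waiter
            return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(table, instance_uuid, event_name):
    waiter = table.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # corresponds to the WARNING about an unexpected network-vif-plugged
        # event: the instance is still building and nothing was waiting yet
        print('Received unexpected event %s for instance %s' % (event_name, instance_uuid))
    else:
        waiter.set()                        # wake whoever called waiter.wait()

A spawning thread would call prepare_for_event(...) before plugging the VIF and then block on the returned waiter with a timeout; an event arriving early, as in the entries above, simply finds no waiter and is logged as unexpected.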
[ 579.009528] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Received event network-changed-bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 579.009528] env[68638]: DEBUG nova.compute.manager [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Refreshing instance network info cache due to event network-changed-bcca1912-c5f3-4a93-a224-b3707bd3c2b0. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 579.010145] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Acquiring lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.010145] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Acquired lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 579.010145] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Refreshing network info cache for port bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 579.012394] env[68638]: DEBUG nova.network.neutron [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Updating instance_info_cache with network_info: [{"id": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "address": "fa:16:3e:0c:e3:1e", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd18e96-0a", "ovs_interfaceid": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.106483] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 
tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 579.109749] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478416} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.115458] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 54af9c38-c8b6-4ef9-be63-de545dcc0da5/54af9c38-c8b6-4ef9-be63-de545dcc0da5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.115730] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 579.118133] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f95761f-745a-4bf0-8ee4-44830d7fd27a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.126843] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832953, 'name': ReconfigVM_Task, 'duration_secs': 0.396236} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.126843] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc/4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 579.127417] env[68638]: DEBUG nova.compute.utils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 579.128688] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 579.128688] env[68638]: value = "task-2832955" [ 579.128688] env[68638]: _type = "Task" [ 579.128688] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.129800] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d32ce44-cb9f-44bf-a45e-134b756d698d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.136346] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 579.136563] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 579.155408] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 579.155408] env[68638]: value = "task-2832956" [ 579.155408] env[68638]: _type = "Task" [ 579.155408] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.166295] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832955, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.173249] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093002} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.177920] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 579.178387] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832956, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.181208] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facfb13a-eabe-449f-907e-cc5084601c74 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.206796] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944/05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 579.207550] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5d72959-bdaf-4af8-87e3-16f0dd9013dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.227446] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 579.227446] env[68638]: value = "task-2832957" [ 579.227446] env[68638]: _type = "Task" [ 579.227446] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.236124] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832957, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.273728] env[68638]: DEBUG nova.policy [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec395966626843e3a6f7d3e34e054a06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9ffb656ebf844d4b71f49b35a594d4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 579.516435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.517340] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Instance network_info: |[{"id": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "address": "fa:16:3e:0c:e3:1e", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd18e96-0a", "ovs_interfaceid": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 579.519051] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:e3:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdd18e96-0a08-4bc0-9252-0044e54e0084', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.533503] env[68638]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating folder: Project (2d2c1dcc55dd42c5b791dd8f1841479b). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.535323] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b0d3e55-6b03-44d0-b74f-156c98e34711 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.538871] env[68638]: DEBUG nova.compute.manager [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Received event network-vif-plugged-cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 579.539137] env[68638]: DEBUG oslo_concurrency.lockutils [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] Acquiring lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.539344] env[68638]: DEBUG oslo_concurrency.lockutils [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.539600] env[68638]: DEBUG oslo_concurrency.lockutils [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.539662] env[68638]: DEBUG nova.compute.manager [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] No waiting events found dispatching network-vif-plugged-cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 579.540089] env[68638]: WARNING nova.compute.manager [req-6cf2a461-95c9-4c02-b358-6d732241a51d req-1f27b584-2957-4ffe-9583-009972cbe85d service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Received unexpected event network-vif-plugged-cdd18e96-0a08-4bc0-9252-0044e54e0084 for instance with vm_state building and task_state spawning. [ 579.549360] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created folder: Project (2d2c1dcc55dd42c5b791dd8f1841479b) in parent group-v569734. [ 579.549360] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating folder: Instances. Parent ref: group-v569744. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.549494] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32ed1087-9f0e-4003-94b6-92e01a7a8c1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.560708] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created folder: Instances in parent group-v569744. [ 579.561074] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 579.561275] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.561489] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9062983-adc8-452c-b513-8ba32363ac80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.594024] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.594024] env[68638]: value = "task-2832960" [ 579.594024] env[68638]: _type = "Task" [ 579.594024] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.606193] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832960, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.634294] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.644251] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 579.665888] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073495} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.666307] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 579.667883] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ad33d0-648d-4f9e-8383-5ac30adc135a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.676065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.676314] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.676486] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 54af9c38-c8b6-4ef9-be63-de545dcc0da5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.676671] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 6cb1846a-02aa-4dc3-a573-858abf5a0bdf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.676848] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7617a7b1-3b21-4d38-b090-1d35bc74637b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.677017] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e3cf739a-3104-473d-af66-d9974ed1a222 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 579.678627] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832956, 'name': Rename_Task, 'duration_secs': 0.136713} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.681927] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 579.682317] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2408fb81-7d38-4239-b656-cdd1da11a510 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.705725] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 54af9c38-c8b6-4ef9-be63-de545dcc0da5/54af9c38-c8b6-4ef9-be63-de545dcc0da5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 579.707548] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6804c95-cc00-440d-a614-e1069ee9a4c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.728885] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 579.728885] env[68638]: value = "task-2832961" [ 579.728885] env[68638]: _type = "Task" [ 579.728885] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.739737] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 579.739737] env[68638]: value = "task-2832962" [ 579.739737] env[68638]: _type = "Task" [ 579.739737] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.753923] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832961, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.754328] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832957, 'name': ReconfigVM_Task, 'duration_secs': 0.497384} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.755236] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Successfully created port: 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.757896] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944/05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 579.759168] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7541d30-56f5-448f-b7ba-a4129f619c3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.765564] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.770474] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 579.770474] env[68638]: value = "task-2832963" [ 579.770474] env[68638]: _type = "Task" [ 579.770474] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.779753] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832963, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.806840] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.807086] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.892241] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Updated VIF entry in instance network info cache for port bcca1912-c5f3-4a93-a224-b3707bd3c2b0. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 579.892971] env[68638]: DEBUG nova.network.neutron [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Updating instance_info_cache with network_info: [{"id": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "address": "fa:16:3e:41:77:17", "network": {"id": "e3ba6b79-0a0f-4f1b-815d-91c564b9aa96", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1103305689-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f86fe0709e74230a2688619955c9483", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcca1912-c5", "ovs_interfaceid": "bcca1912-c5f3-4a93-a224-b3707bd3c2b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.104891] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832960, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.183130] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c80895d5-1a59-4779-9da9-9aeec10bc395 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.239881] env[68638]: DEBUG oslo_vmware.api [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2832961, 'name': PowerOnVM_Task, 'duration_secs': 0.523546} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.240252] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.240932] env[68638]: INFO nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Took 8.86 seconds to spawn the instance on the hypervisor. [ 580.241289] env[68638]: DEBUG nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.242200] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8056231c-d02a-41d7-933a-ca51627beb4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.257375] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832962, 'name': ReconfigVM_Task, 'duration_secs': 0.389611} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.257895] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 54af9c38-c8b6-4ef9-be63-de545dcc0da5/54af9c38-c8b6-4ef9-be63-de545dcc0da5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 580.258787] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb65f86d-85b6-4c1a-9398-0e5a0674f142 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.265376] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 580.265376] env[68638]: value = "task-2832964" [ 580.265376] env[68638]: _type = "Task" [ 580.265376] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.273720] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Successfully updated port: 41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 580.289899] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832963, 'name': Rename_Task, 'duration_secs': 0.29277} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.291048] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832964, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.291244] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 580.291484] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed99f227-505a-4584-9634-430712bd6fc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.298227] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 580.298227] env[68638]: value = "task-2832965" [ 580.298227] env[68638]: _type = "Task" [ 580.298227] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.308512] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832965, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.309619] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 580.396365] env[68638]: DEBUG oslo_concurrency.lockutils [req-dd65d6e8-3578-4af0-a03a-302846b05217 req-e2654d14-57f9-4b7b-9627-eef415d7bcc2 service nova] Releasing lock "refresh_cache-54af9c38-c8b6-4ef9-be63-de545dcc0da5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 580.606489] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832960, 'name': CreateVM_Task, 'duration_secs': 0.567355} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.606802] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.607906] env[68638]: DEBUG oslo_vmware.service [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd43a95-1698-4ad8-94c0-75ffa80f2c95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.614281] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.614451] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.614832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 580.615095] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c1b659b-0291-4e60-802e-3f049dee71c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.619900] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 580.619900] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52621bd9-03a1-8d4a-9ece-02b2db49e060" [ 580.619900] env[68638]: _type = "Task" [ 580.619900] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.631914] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52621bd9-03a1-8d4a-9ece-02b2db49e060, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.665622] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 580.692118] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 14772ba8-bde2-42ef-9a37-df876c8af321 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 580.708310] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 580.708310] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.708310] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 580.708476] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.708476] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 580.708476] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 
tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 580.708476] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 580.708741] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 580.708958] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 580.709154] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 580.709345] env[68638]: DEBUG nova.virt.hardware [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 580.712139] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc86a47-b87f-4c2e-bad8-67885d4c89c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.722585] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49238b1e-0df8-4904-abca-633b2d6cdf16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.765557] env[68638]: INFO nova.compute.manager [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Took 13.64 seconds to build instance. [ 580.780120] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832964, 'name': Rename_Task, 'duration_secs': 0.255883} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.780120] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 580.780120] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cbab602-bd66-4518-b90e-0015e4a69491 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.782207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.782508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.783175] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 580.789772] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 580.789772] env[68638]: value = "task-2832966" [ 580.789772] env[68638]: _type = "Task" [ 580.789772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.799878] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.811690] env[68638]: DEBUG oslo_vmware.api [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2832965, 'name': PowerOnVM_Task, 'duration_secs': 0.492219} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.811969] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.812177] env[68638]: INFO nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Took 12.09 seconds to spawn the instance on the hypervisor. [ 580.812356] env[68638]: DEBUG nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.813423] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0553b4f-73f2-45ed-b581-cf3806291850 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.844036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.136812] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.136947] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.137222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.137368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.137549] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 581.138168] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-972fcf90-9df8-441b-980c-c8ba0597b1c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.160426] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 581.161164] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 581.161505] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd61b06-de85-42cd-a46f-fc02e3768123 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.170280] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4409b15f-f40f-4d67-b858-06ad9805507a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.176122] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 581.176122] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f81d0e-b48b-8670-f824-fa73e159e2e5" [ 581.176122] env[68638]: _type = "Task" [ 581.176122] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.185466] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f81d0e-b48b-8670-f824-fa73e159e2e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.200160] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 581.200398] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 581.200539] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 581.267599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ac70e62f-d249-4921-a372-249404c25526 tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.152s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.306831] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832966, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.345721] env[68638]: INFO nova.compute.manager [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Took 16.87 seconds to build instance. [ 581.418727] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 581.441382] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea7fa1c-220a-448f-bca4-00ef9b81f933 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.460622] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a15a1d2-2bfa-4170-a46a-30c74f87bac2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.505021] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171d62ff-6677-49b3-8b65-000091c237bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.510463] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73eb31f2-edea-412b-a6b5-81f425dae5ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.529151] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.656409] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.656740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.687165] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 581.687374] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating directory with path [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 581.687755] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2eddbd9b-ab5d-4d21-b2fd-876c40ade7b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.723850] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created directory with path [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 581.723850] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Fetch image to [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 581.723850] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Downloading image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk on the data store datastore2 {{(pid=68638) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 581.727724] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4050da2-4a40-4718-b014-234bd44779a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.744257] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01bd9d0-144c-4ca6-be33-4fdd5e177878 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.758317] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad30ddbb-4e76-4cc4-885e-2abda0ef44a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.800535] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40634413-151a-4510-8e59-6d82e308b702 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.816043] env[68638]: DEBUG oslo_vmware.api [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2832966, 'name': PowerOnVM_Task, 'duration_secs': 0.908122} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.817029] env[68638]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0c4816e-f3a3-4a32-a1be-5f15cea7e33a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.819070] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 581.819296] env[68638]: INFO nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Took 7.94 seconds to spawn the instance on the hypervisor. [ 581.819561] env[68638]: DEBUG nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 581.820291] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08423813-d2fd-43f3-aa11-a8516e539d51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.848153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e64bdce1-7a8d-41d0-94a6-00057299be82 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.382s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.910664] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Downloading image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to the data store datastore2 {{(pid=68638) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 581.939491] env[68638]: DEBUG nova.network.neutron [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.982643] env[68638]: DEBUG oslo_vmware.rw_handles [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 582.043749] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 582.052435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "a5e993de-7aad-4b34-8946-563dc69a6f25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.052435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.159083] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 582.197568] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Successfully updated port: 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 582.343841] env[68638]: INFO nova.compute.manager [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Took 12.72 seconds to build instance. [ 582.441653] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 582.442020] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Instance network_info: |[{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 582.442900] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:75:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41ce015b-dfb7-4031-a11b-8dfd0e29bb62', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 582.451994] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Creating folder: Project (e92752f6508d4e0eae7e29247444a38f). 
Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 582.452859] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-082dbc25-ae14-45b9-ade9-87c44d90eb7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.467947] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Created folder: Project (e92752f6508d4e0eae7e29247444a38f) in parent group-v569734. [ 582.468398] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Creating folder: Instances. Parent ref: group-v569747. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 582.468562] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb3f417f-e436-47c0-b0a2-579f26b2ee9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.478315] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Created folder: Instances in parent group-v569747. [ 582.479519] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 582.479519] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 582.479519] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f94e3194-e65a-44bf-9a4d-c3cb075b0d99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.506643] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 582.506643] env[68638]: value = "task-2832969" [ 582.506643] env[68638]: _type = "Task" [ 582.506643] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.518495] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832969, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.554710] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 582.554710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.929s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 582.554710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.578s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.555684] env[68638]: INFO nova.compute.claims [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.560348] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 582.696869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.701121] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.701321] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 582.701529] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 582.847163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-47bb60ab-0089-4918-8469-4f55ec3c6e8c tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.235s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 582.911490] env[68638]: DEBUG oslo_vmware.rw_handles [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 582.911596] env[68638]: DEBUG oslo_vmware.rw_handles [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 582.975115] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Downloaded image file data ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk on the data store datastore2 {{(pid=68638) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 582.976835] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 582.977157] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copying Virtual Disk [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk to [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.977405] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-329f853a-6019-4123-9265-d83db88e92c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.990895] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 582.990895] env[68638]: value = "task-2832970" [ 582.990895] env[68638]: _type = "Task" [ 582.990895] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.003826] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.015820] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832969, 'name': CreateVM_Task, 'duration_secs': 0.341944} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.015820] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 583.016420] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.016525] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.016869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 583.017161] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f26d844-fe09-49eb-b698-3bd7e00741ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.022020] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 583.022020] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5284c6f1-808c-2713-35d8-40d486fe9047" [ 583.022020] env[68638]: _type = "Task" [ 583.022020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.030749] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5284c6f1-808c-2713-35d8-40d486fe9047, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.080041] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Received event network-changed-cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 583.080161] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Refreshing instance network info cache due to event network-changed-cdd18e96-0a08-4bc0-9252-0044e54e0084. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 583.080420] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Acquiring lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.080601] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Acquired lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.081340] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Refreshing network info cache for port cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.090501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.250744] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.396177] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.396445] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.432161] env[68638]: DEBUG nova.network.neutron [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.502292] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832970, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.535211] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.535868] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.535868] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.569115] env[68638]: DEBUG nova.compute.manager [None req-82803b1e-af1b-4a46-8fb8-c650df2d6d2f tempest-ServerDiagnosticsV248Test-1708395640 tempest-ServerDiagnosticsV248Test-1708395640-project-admin] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 583.570334] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097f9702-9f9c-413a-8969-ff00bb771ab1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.584034] env[68638]: INFO nova.compute.manager [None req-82803b1e-af1b-4a46-8fb8-c650df2d6d2f tempest-ServerDiagnosticsV248Test-1708395640 tempest-ServerDiagnosticsV248Test-1708395640-project-admin] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Retrieving diagnostics [ 583.589355] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d3d0c3-3c17-4e2e-8954-80056cb00fb1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.834806] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baee0dc1-9d9b-4987-9096-8cddf7956634 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.846250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8d9996-5219-4d7e-be24-1791c58d3e89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.879664] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c63917-1676-43c6-b60b-a2576605cad9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.888463] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830a9b16-f262-4005-9914-59498c606da6 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.904366] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 583.907203] env[68638]: DEBUG nova.compute.provider_tree [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.934793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.935488] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Instance network_info: |[{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 583.936293] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:e5:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63f69876-6edd-4869-b1f4-40bf4dd16383', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 583.944606] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a 
tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Creating folder: Project (b9ffb656ebf844d4b71f49b35a594d4c). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 583.945236] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-874fb2cb-d00f-4b15-ab4c-b29306ca8cac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.958548] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Created folder: Project (b9ffb656ebf844d4b71f49b35a594d4c) in parent group-v569734. [ 583.958743] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Creating folder: Instances. Parent ref: group-v569750. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 583.959051] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49f36e05-8d0b-46e6-a1bb-4fb42ce4c103 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.964273] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Updated VIF entry in instance network info cache for port cdd18e96-0a08-4bc0-9252-0044e54e0084. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 583.964575] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Updating instance_info_cache with network_info: [{"id": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "address": "fa:16:3e:0c:e3:1e", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdd18e96-0a", "ovs_interfaceid": "cdd18e96-0a08-4bc0-9252-0044e54e0084", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.968206] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 
tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Created folder: Instances in parent group-v569750. [ 583.968435] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 583.968616] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 583.968812] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b59f882-725d-4d73-bb4b-b4253238aae2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.991991] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 583.991991] env[68638]: value = "task-2832973" [ 583.991991] env[68638]: _type = "Task" [ 583.991991] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.002385] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832973, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.005629] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695165} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.005846] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copied Virtual Disk [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk to [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 584.006097] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleting the datastore file [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/tmp-sparse.vmdk {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 584.006612] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5efc5384-31f7-4de8-a6ef-7be73a7aa3e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.012732] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 584.012732] env[68638]: value = "task-2832974" [ 584.012732] env[68638]: _type = "Task" [ 584.012732] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.021413] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832974, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.412883] env[68638]: DEBUG nova.scheduler.client.report [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 584.467247] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Releasing lock "refresh_cache-6cb1846a-02aa-4dc3-a573-858abf5a0bdf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.467549] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Received event network-vif-plugged-41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 584.467744] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.467942] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.468122] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.468292] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] No waiting events found dispatching network-vif-plugged-41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 584.468457] env[68638]: WARNING nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Received unexpected event network-vif-plugged-41ce015b-dfb7-4031-a11b-8dfd0e29bb62 for instance with vm_state building and task_state spawning. 
[ 584.468615] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Received event network-changed-41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 584.468778] env[68638]: DEBUG nova.compute.manager [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Refreshing instance network info cache due to event network-changed-41ce015b-dfb7-4031-a11b-8dfd0e29bb62. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 584.468964] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Acquiring lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.469114] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Acquired lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 584.469271] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Refreshing network info cache for port 41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 584.503019] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832973, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.519198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.529263] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026806} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.530216] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 584.532839] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Moving file from [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 to [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9. {{(pid=68638) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 584.532839] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-5df33374-07bc-4f01-86d9-25fa08c08986 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.540746] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 584.540746] env[68638]: value = "task-2832975" [ 584.540746] env[68638]: _type = "Task" [ 584.540746] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.552522] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832975, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.667146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Acquiring lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.667372] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.667562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Acquiring lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.671310] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.671310] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.673586] env[68638]: INFO nova.compute.manager [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Terminating instance [ 584.922675] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.922675] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 584.932078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.295s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.932925] env[68638]: INFO nova.compute.claims [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.002325] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832973, 'name': CreateVM_Task, 'duration_secs': 0.672644} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.002531] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 585.003474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.003474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.004215] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 585.004215] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ccd48e-8dcb-4f21-a300-f4bb78773067 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.008909] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 585.008909] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b2f48-0c6b-a16b-6083-d394be8a3027" [ 585.008909] env[68638]: _type = "Task" [ 585.008909] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.017287] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528b2f48-0c6b-a16b-6083-d394be8a3027, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.050212] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832975, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028303} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.050471] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] File moved {{(pid=68638) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 585.050670] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Cleaning up location [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 585.050849] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleting the datastore file [datastore2] vmware_temp/43decdf5-9fff-4f43-b5b0-fff21b9bb7df {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 585.051153] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e07f21f-6e01-47b9-a809-6c4cacecfb31 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.059602] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 585.059602] env[68638]: value = "task-2832976" [ 585.059602] env[68638]: _type = "Task" [ 585.059602] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.067753] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.180786] env[68638]: DEBUG nova.compute.manager [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 585.181086] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 585.182086] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73591f74-dfdb-4fae-9b59-acae56d71011 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.190585] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 585.190585] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-189da30f-bf5b-452c-903f-b21ce2278526 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.197614] env[68638]: DEBUG oslo_vmware.api [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Waiting for the task: (returnval){ [ 585.197614] env[68638]: value = "task-2832977" [ 585.197614] env[68638]: _type = "Task" [ 585.197614] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.211461] env[68638]: DEBUG oslo_vmware.api [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Task: {'id': task-2832977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.430904] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updated VIF entry in instance network info cache for port 41ce015b-dfb7-4031-a11b-8dfd0e29bb62. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 585.431495] env[68638]: DEBUG nova.network.neutron [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.438383] env[68638]: DEBUG nova.compute.utils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 585.444985] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 585.445201] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 585.522460] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528b2f48-0c6b-a16b-6083-d394be8a3027, 'name': SearchDatastore_Task, 'duration_secs': 0.011267} completed successfully. 
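Editor's note: the "Updating instance_info_cache with network_info" entry above is a JSON-like list of VIF dictionaries. The snippet below copies an abridged version of that entry and shows how the fields of interest (port id, fixed IPs, MTU, segmentation id) can be pulled out; the helper is purely illustrative and is not Nova's network info model code.

```python
# Abridged copy of the VIF entry logged above; illustrative only.
network_info = [{
    "id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62",
    "address": "fa:16:3e:17:75:81",
    "devname": "tap41ce015b-df",
    "type": "ovs",
    "details": {"segmentation_id": 78,
                "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a"},
    "network": {
        "label": "shared",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "gateway": {"address": "192.168.233.1"},
            "ips": [{"address": "192.168.233.112", "type": "fixed"}],
        }],
    },
}]

def summarize_vifs(nw_info):
    """Flatten the cached VIF list into (port_id, fixed_ips, mtu, vlan) tuples."""
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield (vif["id"], ips,
               vif["network"]["meta"].get("mtu"),
               vif["details"].get("segmentation_id"))

for port_id, ips, mtu, vlan in summarize_vifs(network_info):
    print(port_id, ips, mtu, vlan)
```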
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.522950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.523198] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 585.523414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.527403] env[68638]: DEBUG nova.policy [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72b944a0d853408fa82d313bdc7b8bac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '269a5618b37e42189dca254a5073c269', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.571892] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.028748} completed successfully. 
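Editor's note: the "Policy check for network:attach_external_network failed" line above is oslo.policy rejecting a member/reader token. Below is a minimal, self-contained sketch of such a check using oslo.policy; the default rule string "is_admin:True" is an assumption for illustration and is not necessarily Nova's shipped default for this rule, and the credential dict is trimmed from the one in the log.

```python
from oslo_config import cfg
from oslo_policy import policy

# Minimal sketch of the oslo.policy check behind the "Policy check ... failed"
# log line. The default rule string is an assumed placeholder.
CONF = cfg.CONF
enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "is_admin:True"))

creds = {"roles": ["member", "reader"], "is_admin": False,
         "project_id": "269a5618b37e42189dca254a5073c269"}

# do_raise=False returns a bool instead of raising PolicyNotAuthorized,
# which is why the log records a failed check rather than an exception.
allowed = enforcer.enforce("network:attach_external_network",
                           target={}, creds=creds, do_raise=False)
print(allowed)  # False for a member/reader token, as in the log
```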
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.573037] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 585.574320] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e38ad4-e077-49d8-9dab-bb61683f16bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.581257] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 585.581257] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c24a3c-22a7-b9d4-c45d-8cfe36ed21bb" [ 585.581257] env[68638]: _type = "Task" [ 585.581257] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.591832] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c24a3c-22a7-b9d4-c45d-8cfe36ed21bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.658412] env[68638]: DEBUG nova.compute.manager [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-vif-plugged-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 585.658506] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Acquiring lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.659276] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.659276] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.659666] env[68638]: DEBUG nova.compute.manager [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] No waiting events found dispatching 
network-vif-plugged-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 585.660859] env[68638]: WARNING nova.compute.manager [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received unexpected event network-vif-plugged-63f69876-6edd-4869-b1f4-40bf4dd16383 for instance with vm_state building and task_state spawning. [ 585.660859] env[68638]: DEBUG nova.compute.manager [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 585.660859] env[68638]: DEBUG nova.compute.manager [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing instance network info cache due to event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 585.661055] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.661298] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.661822] env[68638]: DEBUG nova.network.neutron [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 585.712506] env[68638]: DEBUG oslo_vmware.api [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Task: {'id': task-2832977, 'name': PowerOffVM_Task, 'duration_secs': 0.215966} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.712769] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 585.712931] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 585.713544] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f65ac64-80f1-4e28-9077-53390365da95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.772444] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.772772] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.776529] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 585.776745] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 585.776936] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Deleting the datastore file [datastore1] 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 585.777401] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-946bed2e-dd77-4a1c-9ba3-8b3604f3d528 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.784939] env[68638]: DEBUG oslo_vmware.api [None 
req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Waiting for the task: (returnval){ [ 585.784939] env[68638]: value = "task-2832979" [ 585.784939] env[68638]: _type = "Task" [ 585.784939] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.794343] env[68638]: DEBUG oslo_vmware.api [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Task: {'id': task-2832979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.933977] env[68638]: DEBUG oslo_concurrency.lockutils [req-106d0ac8-bcd3-45d2-9833-254f2b8f0565 req-5741cbbf-9cf4-4eb0-8e93-8f2954122b9a service nova] Releasing lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.949592] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 586.076878] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Successfully created port: d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.093950] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c24a3c-22a7-b9d4-c45d-8cfe36ed21bb, 'name': SearchDatastore_Task, 'duration_secs': 0.01825} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.097219] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 586.097303] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf/6cb1846a-02aa-4dc3-a573-858abf5a0bdf.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 586.097777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.097969] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.098197] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e53cbf39-d7d8-4a4a-97a1-23fc61845c3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.100411] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c494d65-896a-4049-ae47-3fd7f6f31ad8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.107379] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 586.107379] env[68638]: value = "task-2832980" [ 586.107379] env[68638]: _type = "Task" [ 586.107379] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.114077] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.114077] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Folder [datastore2] devstack-image-cache_base created. 
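Editor's note: the MakeDirectory and CopyVirtualDisk_Task calls above are the vmwareapi image-cache pattern at work — the base image lives once under devstack-image-cache_base and is copied to a per-instance vmdk while a lock on the cached path is held. The sketch below reproduces that flow with plain files and a no-op lock so it stays dependency-free; the function names and the `fetch_from_glance` callable are placeholders, not Nova's vm_util/vmops API surface.

```python
import os
import shutil
from contextlib import contextmanager

@contextmanager
def fake_lock(name):
    # Stand-in for oslo_concurrency.lockutils.lock(); the real code serializes
    # on the cached vmdk path, as the Acquiring/Releasing lock lines show.
    yield

def fetch_image_if_missing(cache_dir, image_id, instance_dir, fetch_from_glance):
    """Copy-on-first-use image cache mirroring the logged flow:
    lock the cache entry, download it once, then copy it per instance."""
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    with fake_lock(cached):
        if not os.path.exists(cached):
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            fetch_from_glance(image_id, cached)       # one download, many VMs
        os.makedirs(instance_dir, exist_ok=True)
        root = os.path.join(instance_dir, "root.vmdk")
        shutil.copy(cached, root)                     # CopyVirtualDisk_Task analogue
    return root
```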
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.114077] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12647e95-1458-4571-adee-89c0fc711d64 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.121583] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.124822] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 586.124822] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52af741c-a106-d1b7-ff63-120f0a5a1dcd" [ 586.124822] env[68638]: _type = "Task" [ 586.124822] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.132637] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52af741c-a106-d1b7-ff63-120f0a5a1dcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.193318] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbeb8a7-3b31-4095-a089-19e8f483b7bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.201306] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8016185b-0452-4153-9033-0dafb2fee62a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.236930] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405c8d0c-86af-4c72-81cb-6075331e67ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.247133] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbfe6e7-4987-46b3-8c5c-210716d04111 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.265220] env[68638]: DEBUG nova.compute.provider_tree [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.278065] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.304948] env[68638]: DEBUG oslo_vmware.api [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Task: {'id': task-2832979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163874} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.304948] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 586.304948] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 586.304948] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.304948] env[68638]: INFO nova.compute.manager [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Took 1.12 seconds to destroy the instance on the hypervisor. [ 586.307549] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 586.307549] env[68638]: DEBUG nova.compute.manager [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 586.307549] env[68638]: DEBUG nova.network.neutron [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.622727] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832980, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.636319] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52af741c-a106-d1b7-ff63-120f0a5a1dcd, 'name': SearchDatastore_Task, 'duration_secs': 0.008943} completed successfully. 
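Editor's note: the destroy path above runs in a fixed order — power off, unregister, delete the datastore files, then deallocate networking, with the deallocation wrapped in a retrying helper (the loopingcall "Waiting for function ... _deallocate_network_with_retries" line). The sketch below only illustrates that ordering; every handle is a hypothetical duck-typed object, and the retry loop uses plain time.sleep instead of oslo.service's looping call.

```python
import time

def deallocate_network_with_retries(deallocate, attempts=3, delay=1.0):
    """Stand-in for the loopingcall-based retry wrapper named in the log."""
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay * attempt)

def destroy_instance(vm, datastore, network_api, instance):
    """Teardown ordering seen in the log; all four arguments are hypothetical."""
    vm.power_off()                          # PowerOffVM_Task
    vm.unregister()                         # VirtualMachine.UnregisterVM
    datastore.delete_tree(instance.uuid)    # FileManager.DeleteDatastoreFile_Task
    deallocate_network_with_retries(
        lambda: network_api.deallocate_for_instance(instance))
```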
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.637358] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab7bf5af-3f15-472b-b9e7-0707d33448d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.644866] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 586.644866] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5229cdb2-59b2-e4d0-a62b-605a5344e552" [ 586.644866] env[68638]: _type = "Task" [ 586.644866] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.656161] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5229cdb2-59b2-e4d0-a62b-605a5344e552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.772451] env[68638]: DEBUG nova.scheduler.client.report [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 586.815067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.968390] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Start spawning the instance on the hypervisor. 
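Editor's note: the inventory reported above gives total, reserved, allocation_ratio and max_unit per resource class. Placement's usable capacity is (total - reserved) * allocation_ratio, with max_unit still capping any single allocation; the worked sketch below applies that formula to the numbers from the log entry (treat the int rounding as illustrative).

```python
# Inventory copied from the log entry above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 170},
}

def capacity(inv):
    """Usable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```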
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 587.002544] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 587.004024] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.004024] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 587.004024] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.004024] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 587.004024] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 587.004262] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 587.004665] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 587.004985] 
env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 587.005523] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 587.005882] env[68638]: DEBUG nova.virt.hardware [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 587.006897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c6afb8-189f-4113-87a5-4a2c8e60123e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.020181] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a3d65c-05ed-4303-b295-12105c079631 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.107384] env[68638]: DEBUG nova.network.neutron [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updated VIF entry in instance network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383. 
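Editor's note: the nova.virt.hardware lines above walk from "Build topologies for 1 vcpu(s) 1:1:1" to a single possible VirtCPUTopology(cores=1,sockets=1,threads=1). The brute-force sketch below enumerates (sockets, cores, threads) factorizations of the vCPU count under the 65536 ceilings shown in the log; it is illustrative only and simpler than Nova's actual ordering and preference handling in hardware.py.

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals the
    vCPU count, within the given ceilings. Brute force, for illustration."""
    return [(s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus]

print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), (4, 1, 1), ...
```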
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 587.107384] env[68638]: DEBUG nova.network.neutron [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.121349] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5141} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.125845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf/6cb1846a-02aa-4dc3-a573-858abf5a0bdf.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 587.125845] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 587.125845] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40fc0ce7-7cb2-48a6-b809-a979d8a4461a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.135569] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 587.135569] env[68638]: value = "task-2832981" [ 587.135569] env[68638]: _type = "Task" [ 587.135569] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.148198] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.158654] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5229cdb2-59b2-e4d0-a62b-605a5344e552, 'name': SearchDatastore_Task, 'duration_secs': 0.009862} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.158993] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.159414] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.159801] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 587.160088] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.160386] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c239140d-ca3c-4e6f-88bc-4508a6da6d85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.163673] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8620661f-388e-425b-95eb-7563cb783fd4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.171481] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 587.171481] env[68638]: value = "task-2832982" [ 587.171481] env[68638]: _type = "Task" [ 587.171481] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.175867] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.176082] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 587.177207] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85593a65-5823-4456-8eb9-9e8cbb386b63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.184300] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.185632] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 587.185632] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b2490d-ab2c-b653-0e8e-ad6bbdd9f0e2" [ 587.185632] env[68638]: _type = "Task" [ 587.185632] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.193710] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b2490d-ab2c-b653-0e8e-ad6bbdd9f0e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.199730] env[68638]: DEBUG nova.network.neutron [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.278638] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.279290] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 587.284075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.440s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.286727] env[68638]: INFO nova.compute.claims [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 587.366165] env[68638]: DEBUG nova.compute.manager [req-d7f4b019-1aea-4453-8c8e-01f6133727b0 req-be3f83e3-391e-4b13-a719-d28ce1879c34 service nova] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Received event network-vif-deleted-ddb66c99-6c88-4e52-a360-3b2778771361 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 587.610278] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cb159de-54e1-472c-8117-10158a8fd6cf req-368d6d67-3b08-4c9c-9d4c-7affa4be0168 service nova] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.647648] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072405} completed successfully. 
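Editor's note: the "Lock \"compute_resources\" acquired ... waited 6.440s" / "released ... held 2.350s" pairs above come from resource claims being serialized on a single oslo.concurrency semaphore. The sketch below uses the real lockutils.synchronized decorator with that lock name; the claim body is a placeholder dict update, not ResourceTracker.instance_claim's actual accounting.

```python
from oslo_concurrency import lockutils

# Same process-local semaphore name as in the log; lockutils' "inner" wrapper
# is what emits the acquired/waited and released/held DEBUG lines.
@lockutils.synchronized("compute_resources")
def instance_claim(tracker, instance_uuid, flavor):
    """Placeholder claim: bump usage counters while holding the lock."""
    tracker["vcpus_used"] += flavor["vcpus"]
    tracker["memory_mb_used"] += flavor["memory_mb"]
    return dict(tracker)

tracker = {"vcpus_used": 0, "memory_mb_used": 0}
print(instance_claim(tracker, "14772ba8-bde2-42ef-9a37-df876c8af321",
                     {"vcpus": 1, "memory_mb": 192}))
```

The waited/held durations in the log are simply the time each request spent queued behind, and then inside, this critical section.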
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.648073] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 587.648891] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5dffa-13d4-4a44-9fd5-05419380954d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.680157] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf/6cb1846a-02aa-4dc3-a573-858abf5a0bdf.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 587.680428] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04a42bbd-9df1-40e2-9d89-2846bbb7907e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.706263] env[68638]: INFO nova.compute.manager [-] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Took 1.40 seconds to deallocate network for instance. [ 587.719147] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 587.719147] env[68638]: value = "task-2832983" [ 587.719147] env[68638]: _type = "Task" [ 587.719147] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.725397] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515518} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.734563] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 587.734563] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 587.734563] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b2490d-ab2c-b653-0e8e-ad6bbdd9f0e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009401} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.734563] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551b1452-580a-4555-b94a-e75dc55e3691 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.737658] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdc804a4-0178-46a0-b32a-e702189da860 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.745202] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.749189] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 587.749189] env[68638]: value = "task-2832984" [ 587.749189] env[68638]: _type = "Task" [ 587.749189] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.750305] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 587.750305] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527a2174-ad84-f79e-6199-f4ed4d55523b" [ 587.750305] env[68638]: _type = "Task" [ 587.750305] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.766020] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832984, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.768957] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527a2174-ad84-f79e-6199-f4ed4d55523b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.790154] env[68638]: DEBUG nova.compute.utils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 587.791922] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 587.792394] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 587.902273] env[68638]: DEBUG nova.policy [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 588.218083] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.233439] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832983, 'name': ReconfigVM_Task, 'duration_secs': 0.335323} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.233807] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf/6cb1846a-02aa-4dc3-a573-858abf5a0bdf.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.234505] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52047fbf-13d3-4e8a-9e7e-79df7a79c68d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.242219] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 588.242219] env[68638]: value = "task-2832985" [ 588.242219] env[68638]: _type = "Task" [ 588.242219] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.252710] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832985, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.266284] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075373} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.271387] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.271387] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527a2174-ad84-f79e-6199-f4ed4d55523b, 'name': SearchDatastore_Task, 'duration_secs': 0.013312} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.272044] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22cd183-1002-43fa-88c7-050d208fd045 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.277845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 588.277845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e3cf739a-3104-473d-af66-d9974ed1a222/e3cf739a-3104-473d-af66-d9974ed1a222.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 588.277845] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d0d900c-4db4-4d4c-9410-6d348d43b149 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.308133] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.310295] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 588.315937] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4c7e645-dd86-47dd-b8fd-32b221e85ceb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.333678] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 588.333678] env[68638]: value = "task-2832986" [ 588.333678] env[68638]: _type = "Task" [ 588.333678] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.341259] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 588.341259] env[68638]: value = "task-2832987" [ 588.341259] env[68638]: _type = "Task" [ 588.341259] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.345098] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.353969] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832987, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.601447] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f056f3b8-5554-46b3-bd90-742b2e08f338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.611586] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05431718-d401-40e9-bbf3-3450689a7e5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.650977] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc8f8c4-abdd-4e42-b3b1-f58e628de32e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.660738] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9f1cc9-5c64-46da-900d-ca42c7fba892 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.676889] env[68638]: DEBUG nova.compute.provider_tree [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 588.756346] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832985, 'name': Rename_Task, 'duration_secs': 0.146206} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.756650] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 588.756914] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-670337ad-df1e-4695-895e-06bf6425ae76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.764594] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 588.764594] env[68638]: value = "task-2832988" [ 588.764594] env[68638]: _type = "Task" [ 588.764594] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.773990] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832988, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.854919] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832986, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.858431] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.040589] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Successfully created port: 316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.219582] env[68638]: ERROR nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [req-510b0973-37f8-49ef-bc27-455562429589] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-510b0973-37f8-49ef-bc27-455562429589"}]} [ 589.242170] env[68638]: DEBUG nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 589.260654] env[68638]: DEBUG nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 589.260951] env[68638]: DEBUG nova.compute.provider_tree [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 589.274248] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2832988, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.280470] env[68638]: DEBUG nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 589.304146] env[68638]: DEBUG nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 589.344605] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 589.363862] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575938} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.370198] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e3cf739a-3104-473d-af66-d9974ed1a222/e3cf739a-3104-473d-af66-d9974ed1a222.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 589.370622] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 589.371012] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832987, 'name': ReconfigVM_Task, 'duration_secs': 0.560129} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.373417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb9522a7-46c7-406a-a11c-7ae0275e22e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.375672] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 589.376804] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d8eb7fe-4356-4a59-8b6a-a54e89b37b16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.383545] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 589.383821] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.383987] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 589.384240] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.384957] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 589.385269] env[68638]: DEBUG nova.virt.hardware [None 
req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 589.385746] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 589.385983] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 589.386351] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 589.386625] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 589.386879] env[68638]: DEBUG nova.virt.hardware [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 589.391506] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f0b07b-eac4-42fe-a4cf-d77747c3339f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.402892] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 589.402892] env[68638]: value = "task-2832990" [ 589.402892] env[68638]: _type = "Task" [ 589.402892] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.403292] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 589.403292] env[68638]: value = "task-2832989" [ 589.403292] env[68638]: _type = "Task" [ 589.403292] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.414826] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b57f64d-d9f5-4b59-9dba-929fb27983f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.427874] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832989, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.428504] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832990, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.558916] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878e0c03-df4c-44bc-8780-6b274259bd7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.569025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d95b09-ce81-440c-9b37-f33f4175b1f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.602490] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356c1b9-2f8f-401c-b369-ad656a6efc1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.612342] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898220be-672c-4ac2-bf54-50f0ce1d8d5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.627678] env[68638]: DEBUG nova.compute.provider_tree [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 589.665170] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Successfully updated port: d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.782807] env[68638]: DEBUG oslo_vmware.api [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: 
{'id': task-2832988, 'name': PowerOnVM_Task, 'duration_secs': 0.537512} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.784122] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 589.784497] env[68638]: INFO nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Took 13.64 seconds to spawn the instance on the hypervisor. [ 589.784929] env[68638]: DEBUG nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 589.785790] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d67a71-435d-4e48-8eb0-3b8f6e5186d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.919503] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832990, 'name': Rename_Task, 'duration_secs': 0.313733} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.921082] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832989, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068602} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.921082] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 589.921082] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 589.921082] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b880a0f9-da4f-4002-aa11-1d542d759e90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.924087] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba1e75d-a150-4401-a26a-6df3933a1125 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.954522] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] e3cf739a-3104-473d-af66-d9974ed1a222/e3cf739a-3104-473d-af66-d9974ed1a222.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 589.957783] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e79a8a1-e12c-4dcb-ad34-3431543ecab2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.974285] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 589.974285] env[68638]: value = "task-2832991" [ 589.974285] env[68638]: _type = "Task" [ 589.974285] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.979742] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 589.979742] env[68638]: value = "task-2832992" [ 589.979742] env[68638]: _type = "Task" [ 589.979742] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.987108] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832991, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.992447] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832992, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.168133] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.168260] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.168446] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 590.180623] env[68638]: DEBUG nova.scheduler.client.report [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 17 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 590.180623] env[68638]: DEBUG nova.compute.provider_tree [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 17 to 18 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 590.180623] env[68638]: DEBUG nova.compute.provider_tree [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 590.227755] env[68638]: DEBUG nova.compute.manager [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Received event network-vif-plugged-d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 590.228041] env[68638]: DEBUG oslo_concurrency.lockutils [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.228209] env[68638]: DEBUG oslo_concurrency.lockutils [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.228372] env[68638]: DEBUG oslo_concurrency.lockutils [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.228534] env[68638]: DEBUG nova.compute.manager [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] No waiting events found dispatching network-vif-plugged-d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 590.228690] env[68638]: WARNING nova.compute.manager [req-b02ff69f-7a14-4514-beda-a1620459c09d req-11e90568-af2a-4b41-8698-71d3f4f29167 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Received unexpected event network-vif-plugged-d9c26596-0dec-45f8-9efd-781be344a670 for instance with vm_state building and task_state spawning. [ 590.316123] env[68638]: INFO nova.compute.manager [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Took 20.43 seconds to build instance. [ 590.494571] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832991, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.497878] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832992, 'name': ReconfigVM_Task, 'duration_secs': 0.359485} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.498143] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Reconfigured VM instance instance-00000006 to attach disk [datastore2] e3cf739a-3104-473d-af66-d9974ed1a222/e3cf739a-3104-473d-af66-d9974ed1a222.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 590.498747] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3a06858-f33c-45b6-94d2-6134716e6a57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.505266] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 590.505266] env[68638]: value = "task-2832993" [ 590.505266] env[68638]: _type = "Task" [ 590.505266] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.517822] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832993, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.690350] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.406s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.690902] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 590.696068] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.697650] env[68638]: INFO nova.compute.claims [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.743309] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.819086] env[68638]: DEBUG oslo_concurrency.lockutils [None req-04cb1343-6d10-4dd9-8480-48b9179d9afa tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.943s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.995690] env[68638]: DEBUG oslo_vmware.api [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2832991, 'name': PowerOnVM_Task, 'duration_secs': 0.821251} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.996819] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 590.996819] env[68638]: INFO nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Took 12.61 seconds to spawn the instance on the hypervisor. 
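
The "Waiting for the task: (returnval){ value = "task-..." } to complete" and "progress is N%" entries that recur throughout this section come from oslo.vmware's task polling (wait_for_task / _poll_task) around vCenter tasks such as CopyVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task. The following is a minimal, hypothetical sketch of that polling pattern only, not oslo.vmware's actual implementation; the helper name wait_for_task_sketch, the fetch_task_info callable, and the task-info attributes (state, progress, error, key) are assumptions made for illustration.

import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state (assumed shape)."""


def wait_for_task_sketch(fetch_task_info, poll_interval=0.5, log=print):
    # fetch_task_info: any callable returning an object with .state
    # ('running', 'queued', 'success', 'error') and .progress (0-100).
    # Both the callable and the attribute names are assumptions for this
    # sketch; they stand in for reading TaskInfo from the vCenter API.
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # Surface the failure instead of looping forever.
            raise TaskFailed(getattr(info, "error", "task failed"))
        # Mirrors the "_poll_task ... progress is N%" DEBUG entries above.
        log("Task %s progress is %s%%" % (getattr(info, "key", "?"), info.progress))
        time.sleep(poll_interval)

Under these assumptions, the repeated DEBUG lines in the log correspond to successive iterations of such a loop, and the "completed successfully ... duration_secs" entries correspond to the loop returning once the task reaches its terminal state.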
[ 590.996819] env[68638]: DEBUG nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 590.997161] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8e8f3c-304e-4bde-b9df-0813e6fee4ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.021023] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832993, 'name': Rename_Task, 'duration_secs': 0.143462} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.021023] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 591.021023] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b820c3b9-e6c8-4f89-839c-83316ded1d72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.026017] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 591.026017] env[68638]: value = "task-2832994" [ 591.026017] env[68638]: _type = "Task" [ 591.026017] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.043334] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832994, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.052515] env[68638]: DEBUG nova.network.neutron [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updating instance_info_cache with network_info: [{"id": "d9c26596-0dec-45f8-9efd-781be344a670", "address": "fa:16:3e:89:87:69", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9c26596-0d", "ovs_interfaceid": "d9c26596-0dec-45f8-9efd-781be344a670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.198241] env[68638]: DEBUG nova.compute.utils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 591.202948] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 591.202948] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 591.245385] env[68638]: DEBUG nova.policy [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '075b1dab9233409390d346c7bbfa3d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efa342b9d9a34e9e8e708c8f356f905e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 591.520583] env[68638]: INFO nova.compute.manager [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Took 20.66 seconds to build instance. [ 591.544813] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832994, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.557010] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.557010] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Instance network_info: |[{"id": "d9c26596-0dec-45f8-9efd-781be344a670", "address": "fa:16:3e:89:87:69", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9c26596-0d", "ovs_interfaceid": "d9c26596-0dec-45f8-9efd-781be344a670", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 591.560015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:87:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9c26596-0dec-45f8-9efd-781be344a670', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.569032] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Creating folder: Project (269a5618b37e42189dca254a5073c269). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.569032] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d37d05a8-d771-4353-aa68-b6b980d601fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.585873] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Created folder: Project (269a5618b37e42189dca254a5073c269) in parent group-v569734. [ 591.586136] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Creating folder: Instances. Parent ref: group-v569753. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.586458] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4eac7b9-209e-499a-b42e-97f66c8215bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.598498] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Created folder: Instances in parent group-v569753. [ 591.598792] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 591.599035] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.599300] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-662697c8-321c-4cd7-aa0d-b4a64befac64 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.625674] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.625674] env[68638]: value = "task-2832997" [ 591.625674] env[68638]: _type = "Task" [ 591.625674] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.639538] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832997, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.704050] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 591.991093] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Successfully updated port: 316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.003690] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f20fdb7-bb39-45b8-be86-6087b642589e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.010973] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Successfully created port: 83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 592.016680] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454de069-72f4-4f83-aa0c-db2a1a1f591e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.055213] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9114cefc-bce2-4687-ac24-0805cb30067c tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.210s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.060300] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589bc6b5-c41b-4e2a-83a2-97894afd8a33 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.068689] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832994, 'name': PowerOnVM_Task} progress is 86%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.073134] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ea0039-6582-470e-ba9f-40dd4fdc25bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.090556] env[68638]: DEBUG nova.compute.provider_tree [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.136088] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2832997, 'name': CreateVM_Task, 'duration_secs': 0.358555} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.136193] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 592.137851] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.137851] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.137851] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 592.138032] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b850f139-e21f-4d69-beba-1c943f29e622 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.143252] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 592.143252] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d6dead-14be-2a06-b169-54050aea4578" [ 592.143252] env[68638]: _type = "Task" [ 592.143252] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.152744] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6dead-14be-2a06-b169-54050aea4578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.359639] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "f767af17-f2bb-461d-9e7f-9c62b5504257" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.359917] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.394697] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8f841b29-0156-414e-8467-c9a9393cdae9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.394697] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8f841b29-0156-414e-8467-c9a9393cdae9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.495943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.495943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.495943] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Building network info cache for instance {{(pid=68638) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 592.567713] env[68638]: DEBUG oslo_vmware.api [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2832994, 'name': PowerOnVM_Task, 'duration_secs': 1.097581} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.568110] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 592.569384] env[68638]: INFO nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Took 11.90 seconds to spawn the instance on the hypervisor. [ 592.569384] env[68638]: DEBUG nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 592.570209] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c275c221-0136-48ec-a671-ae1291082d17 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.597798] env[68638]: DEBUG nova.scheduler.client.report [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.668885] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6dead-14be-2a06-b169-54050aea4578, 'name': SearchDatastore_Task, 'duration_secs': 0.010902} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.669435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.669489] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.669688] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.669839] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.672946] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.673548] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-475245de-682e-4908-9af5-41c451c2dd15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.686480] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.687274] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.687453] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2060c4a-fc3e-4206-a926-94ed62b8f2b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.696657] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 592.696657] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52985ef9-5429-663f-4bf9-0e455c0cb847" [ 592.696657] env[68638]: _type = "Task" [ 592.696657] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.708811] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52985ef9-5429-663f-4bf9-0e455c0cb847, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.717855] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 592.751077] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 592.751077] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 592.751373] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 592.751373] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 592.751540] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 592.751723] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 592.751929] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 592.752054] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 592.752286] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 592.752431] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 592.752962] env[68638]: DEBUG nova.virt.hardware [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 592.753534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c1631c-aba4-465d-8702-52990b155487 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.762192] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1fbbd9-f840-420b-89dc-fb666677507e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.868836] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.899279] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 593.039854] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.096613] env[68638]: INFO nova.compute.manager [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Took 16.88 seconds to build instance. [ 593.103508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.104077] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 593.107079] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.017s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.108812] env[68638]: INFO nova.compute.claims [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.214217] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52985ef9-5429-663f-4bf9-0e455c0cb847, 'name': SearchDatastore_Task, 'duration_secs': 0.021936} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.215401] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b9ae594-6142-4776-9ca0-406df931ee46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.223860] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 593.223860] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527c3645-213b-1541-fbdd-031827916865" [ 593.223860] env[68638]: _type = "Task" [ 593.223860] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.234399] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c3645-213b-1541-fbdd-031827916865, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.399310] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.411558] env[68638]: DEBUG nova.network.neutron [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.428096] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.599752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7fbba7b6-f98a-467d-b971-116ff456437a tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.396s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.616582] env[68638]: DEBUG nova.compute.utils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.619943] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 593.740956] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c3645-213b-1541-fbdd-031827916865, 'name': SearchDatastore_Task, 'duration_secs': 0.022599} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.741657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.742045] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c80895d5-1a59-4779-9da9-9aeec10bc395/c80895d5-1a59-4779-9da9-9aeec10bc395.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.742498] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-024aa4a9-8082-447c-b467-a0c21ac761a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.755310] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 593.755310] env[68638]: value = "task-2832998" [ 593.755310] env[68638]: _type = "Task" [ 593.755310] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.766216] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2832998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.845626] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Received event network-changed-d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 593.845626] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Refreshing instance network info cache due to event network-changed-d9c26596-0dec-45f8-9efd-781be344a670. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 593.845922] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Acquiring lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.845973] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Acquired lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.847853] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Refreshing network info cache for port d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 593.916643] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.916986] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Instance network_info: |[{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 593.917435] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:38:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '316407a1-ab13-4bd4-98ef-7e090d54399c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.930327] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating folder: Project (03a342a1ef674059b9ab1a5dc050a82d). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.932984] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04976b68-0b00-439f-ba48-8a05aa2f5cfa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.944964] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created folder: Project (03a342a1ef674059b9ab1a5dc050a82d) in parent group-v569734. [ 593.945179] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating folder: Instances. Parent ref: group-v569756. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.945427] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2ddf358-f85e-4eb6-9a53-8f26cd93d4d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.959042] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created folder: Instances in parent group-v569756. [ 593.959042] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 593.959042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.959042] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3415ed2-c462-45d7-92d4-5c7a4ce0b528 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.988778] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.988778] env[68638]: value = "task-2833001" [ 593.988778] env[68638]: _type = "Task" [ 593.988778] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.999852] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833001, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.125512] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 594.170944] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Successfully updated port: 83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.270354] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2832998, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.427081] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "a5dedd3e-a544-4005-bc9b-0735267d6753" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.427357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.506797] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833001, 'name': CreateVM_Task, 'duration_secs': 0.500844} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.506797] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.510902] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.512036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.512407] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 594.516691] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c19a0a6b-e377-474e-a4ff-dcb368fdcd28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.521981] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 594.521981] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c138d4-8a25-806a-f282-3099032ab7d7" [ 594.521981] env[68638]: _type = "Task" [ 594.521981] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.534039] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c138d4-8a25-806a-f282-3099032ab7d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.535911] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9e0900-0009-4939-b6aa-ea1d4b7e1ee6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.546966] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9383b30c-581b-4571-8bb3-cacc528c303c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.593695] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ae2a74-7208-4356-95b8-3ea5ed242494 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.601939] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67225309-4970-4a9e-8169-c1261627830a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.617225] env[68638]: DEBUG nova.compute.provider_tree [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.674943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.674943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.674943] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.771433] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2832998, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705838} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.771845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c80895d5-1a59-4779-9da9-9aeec10bc395/c80895d5-1a59-4779-9da9-9aeec10bc395.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 594.771979] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.772257] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4ec40f6-14b9-4a85-ae07-74e61df885a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.779751] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 594.779751] env[68638]: value = "task-2833002" [ 594.779751] env[68638]: _type = "Task" [ 594.779751] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.791385] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.932513] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.952458] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updated VIF entry in instance network info cache for port d9c26596-0dec-45f8-9efd-781be344a670. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 594.952679] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updating instance_info_cache with network_info: [{"id": "d9c26596-0dec-45f8-9efd-781be344a670", "address": "fa:16:3e:89:87:69", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9c26596-0d", "ovs_interfaceid": "d9c26596-0dec-45f8-9efd-781be344a670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.037907] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c138d4-8a25-806a-f282-3099032ab7d7, 'name': SearchDatastore_Task, 'duration_secs': 0.026159} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.038319] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.038606] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.038857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.038993] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.039215] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.039492] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af456bdb-6c17-4e73-bf9f-297c0a920efe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.051497] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.051497] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.052908] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1a87dee-1e01-4c27-bab5-cdcb736b888d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.059230] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 595.059230] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529248ec-d516-b4d7-4fbd-e211c311d950" [ 595.059230] env[68638]: _type = "Task" [ 595.059230] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.071628] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529248ec-d516-b4d7-4fbd-e211c311d950, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.086778] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "1946baab-bb48-4138-8db6-1f530e432c3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.087167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.121040] env[68638]: DEBUG nova.scheduler.client.report [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.134664] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 595.167200] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=<?>,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-07T02:25:56Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.167523] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.167698] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.168503] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.168677] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.168822] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.169042] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.169198] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.169370] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a 
tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.169599] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.170038] env[68638]: DEBUG nova.virt.hardware [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.170657] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19fe54e-a257-4108-8235-41e6dbc38a01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.180865] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47cf5154-19b4-4829-881d-0c19c4b08174 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.196680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 595.202790] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Creating folder: Project (bedd57123791409f9405025692459ed7). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.203541] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-047b8d9f-acef-4bfb-8d86-1f1ffeb43061 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.216198] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Created folder: Project (bedd57123791409f9405025692459ed7) in parent group-v569734. [ 595.216198] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Creating folder: Instances. Parent ref: group-v569759. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 595.216198] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9915a978-9963-41b8-898b-5481ce8e9d7f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.225746] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Created folder: Instances in parent group-v569759. [ 595.226294] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 595.229240] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 595.229240] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5939f27-dee8-424c-9328-17b3f44be27b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.246972] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 595.246972] env[68638]: value = "task-2833005" [ 595.246972] env[68638]: _type = "Task" [ 595.246972] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.255912] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833005, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.263964] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.293312] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072922} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.294021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.295256] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59cb8dfa-9092-4dc6-bef9-93b75aa9bc3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.322914] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] c80895d5-1a59-4779-9da9-9aeec10bc395/c80895d5-1a59-4779-9da9-9aeec10bc395.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.323270] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cf890ee-7f59-4493-a6cd-901c1c45b65b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.343436] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 595.343436] env[68638]: value = "task-2833006" [ 595.343436] env[68638]: _type = "Task" [ 595.343436] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.354816] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833006, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.458272] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Releasing lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.459571] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-vif-plugged-316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 595.459571] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.459571] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.459571] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.460011] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] No waiting events found dispatching network-vif-plugged-316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.460011] env[68638]: WARNING nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received unexpected event network-vif-plugged-316407a1-ab13-4bd4-98ef-7e090d54399c for instance with vm_state building and task_state spawning. [ 595.460154] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-changed-316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 595.460316] env[68638]: DEBUG nova.compute.manager [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing instance network info cache due to event network-changed-316407a1-ab13-4bd4-98ef-7e090d54399c. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 595.460423] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.460546] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.460823] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing network info cache for port 316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.466698] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.572595] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529248ec-d516-b4d7-4fbd-e211c311d950, 'name': SearchDatastore_Task, 'duration_secs': 0.022726} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.574048] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cb1fa8d-dea3-4b51-ae4f-c0f30823d0a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.580360] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 595.580360] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525650da-e0ce-ab7c-750c-01b45daa565e" [ 595.580360] env[68638]: _type = "Task" [ 595.580360] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.591825] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525650da-e0ce-ab7c-750c-01b45daa565e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.611664] env[68638]: DEBUG nova.network.neutron [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updating instance_info_cache with network_info: [{"id": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "address": "fa:16:3e:27:30:e5", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c2852d-02", "ovs_interfaceid": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.627511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.629116] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.633097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.116s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.636808] env[68638]: INFO nova.compute.claims [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.765194] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833005, 'name': CreateVM_Task, 'duration_secs': 0.330438} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.766083] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 595.766680] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.766785] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.767203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 595.768401] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d923440-5c90-49eb-86ff-e874ab0bf7ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.776991] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 595.776991] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5272cdef-93fe-e50c-521e-1e8baf9e58e4" [ 595.776991] env[68638]: _type = "Task" [ 595.776991] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.788234] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5272cdef-93fe-e50c-521e-1e8baf9e58e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.858139] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833006, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.098184] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525650da-e0ce-ab7c-750c-01b45daa565e, 'name': SearchDatastore_Task, 'duration_secs': 0.012258} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.098184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.098184] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14772ba8-bde2-42ef-9a37-df876c8af321/14772ba8-bde2-42ef-9a37-df876c8af321.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.098184] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9337e335-ebf0-4745-ac38-86a8c4a38f66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.114743] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 596.114743] env[68638]: value = "task-2833007" [ 596.114743] env[68638]: _type = "Task" [ 596.114743] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.115100] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.116054] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Instance network_info: |[{"id": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "address": "fa:16:3e:27:30:e5", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c2852d-02", "ovs_interfaceid": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 596.117015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:30:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83c2852d-0228-4c4e-b754-0dc81d6b8a11', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.124821] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating folder: Project (efa342b9d9a34e9e8e708c8f356f905e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.128793] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e433590a-2787-4301-bc27-9f0a497ed3fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.138697] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.143013] env[68638]: DEBUG nova.compute.utils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.144273] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created folder: Project (efa342b9d9a34e9e8e708c8f356f905e) in parent group-v569734. [ 596.144441] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating folder: Instances. Parent ref: group-v569762. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.145043] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.145216] env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.146915] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20e4f7b4-d7c6-4a83-bb45-06d1b8b3e2da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.157101] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created folder: Instances in parent group-v569762. [ 596.157354] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 596.157650] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.157727] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb8144fb-ba90-4868-9554-e2f254d83549 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.181159] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.181159] env[68638]: value = "task-2833010" [ 596.181159] env[68638]: _type = "Task" [ 596.181159] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.196506] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833010, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.264114] env[68638]: DEBUG nova.policy [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb0586f390014d92a294e355fade07bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0b7a2c6272941b6ae7d296d4541e9e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.290490] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5272cdef-93fe-e50c-521e-1e8baf9e58e4, 'name': SearchDatastore_Task, 'duration_secs': 0.025478} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.290805] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.291045] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.291320] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.291464] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.291695] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.291947] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bed715c-bcfe-43e2-96a6-35bad5bd502b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.306144] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.306445] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 596.307256] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-946e68a9-d765-4881-b07c-36c8ccff17d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.313875] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 596.313875] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522475e9-8d15-5c2e-7db7-ccf30c01d562" [ 596.313875] env[68638]: _type = "Task" [ 596.313875] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.331569] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522475e9-8d15-5c2e-7db7-ccf30c01d562, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.331870] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "421c377f-0b7a-457d-b5dd-50281c65122a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.332100] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.355599] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833006, 'name': ReconfigVM_Task, 'duration_secs': 0.800578} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.355919] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfigured VM instance instance-00000007 to attach disk [datastore2] c80895d5-1a59-4779-9da9-9aeec10bc395/c80895d5-1a59-4779-9da9-9aeec10bc395.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.356672] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0cbc770-e001-421c-9947-6697a5e6ea2b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.366089] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 596.366089] env[68638]: value = "task-2833011" [ 596.366089] env[68638]: _type = "Task" [ 596.366089] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.378177] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833011, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.531657] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updated VIF entry in instance network info cache for port 316407a1-ab13-4bd4-98ef-7e090d54399c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 596.532710] env[68638]: DEBUG nova.network.neutron [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.632221] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833007, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.645938] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.695440] env[68638]: DEBUG nova.compute.manager [None req-46288e9c-719b-42ef-bfd1-7f8ba8cb0c90 tempest-ServerDiagnosticsV248Test-1708395640 tempest-ServerDiagnosticsV248Test-1708395640-project-admin] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 596.697741] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aa25fe-56e2-441f-b7b5-b947c87deb3f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.713198] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833010, 'name': CreateVM_Task, 'duration_secs': 0.335855} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.717652] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 596.719019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.719275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.719590] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 596.727621] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6bbca67-d2e6-4465-a8bf-967356acaf9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.731390] env[68638]: INFO nova.compute.manager [None req-46288e9c-719b-42ef-bfd1-7f8ba8cb0c90 tempest-ServerDiagnosticsV248Test-1708395640 tempest-ServerDiagnosticsV248Test-1708395640-project-admin] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Retrieving diagnostics [ 596.732493] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c5c473-0f19-4973-91d0-e4358f0a1c87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.740321] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 596.740321] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c72284-da33-2d74-706e-70be415fd57e" [ 596.740321] env[68638]: _type = "Task" [ 596.740321] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.788531] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.788531] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.789127] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c72284-da33-2d74-706e-70be415fd57e, 'name': SearchDatastore_Task, 'duration_secs': 0.021855} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.789836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.789836] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.789836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.823923] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522475e9-8d15-5c2e-7db7-ccf30c01d562, 'name': SearchDatastore_Task, 'duration_secs': 0.013049} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.827640] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-984d6d5e-be47-44a6-8966-335543f63050 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.842020] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 596.842020] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520548dc-b555-917f-4ec2-b3c0e54522ba" [ 596.842020] env[68638]: _type = "Task" [ 596.842020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.854375] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520548dc-b555-917f-4ec2-b3c0e54522ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.881363] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833011, 'name': Rename_Task, 'duration_secs': 0.192394} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.881648] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 596.881892] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d29d5700-4815-4ffe-a764-d4d5d483ab9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.888300] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 596.888300] env[68638]: value = "task-2833012" [ 596.888300] env[68638]: _type = "Task" [ 596.888300] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.898822] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833012, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.913759] env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Successfully created port: f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.035298] env[68638]: DEBUG oslo_concurrency.lockutils [req-e3913de8-366d-4653-96b5-1b1d8b328d24 req-889ccabd-e606-46e1-b7ce-2a119d35c5b6 service nova] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.087202] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b096e0d8-d1b2-4c8d-a514-08a477c7fdf4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.096646] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4224b91-2380-4313-97f4-75c42decbc19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.130503] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c48c9-277d-4191-9c75-c29db1d8a91b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.142300] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683772} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.143998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64146f7-e61b-4a56-8cbd-55441193d55f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.148840] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14772ba8-bde2-42ef-9a37-df876c8af321/14772ba8-bde2-42ef-9a37-df876c8af321.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.149094] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.149381] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b350d205-56d3-4fa6-91cb-5d0f5ea273fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.167324] env[68638]: DEBUG nova.compute.provider_tree [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.173769] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 597.173769] env[68638]: value = "task-2833013" [ 597.173769] env[68638]: _type = "Task" [ 597.173769] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.181171] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833013, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.355936] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520548dc-b555-917f-4ec2-b3c0e54522ba, 'name': SearchDatastore_Task, 'duration_secs': 0.019488} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.356454] env[68638]: DEBUG nova.compute.manager [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 597.359290] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.359561] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.360030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.360230] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.360627] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4f52513-fbfa-4a52-8b9c-2ce397bbab32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.362992] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-beea466f-1e69-4dfa-b71f-3b5f8ee65c10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.369533] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 597.369533] env[68638]: value = "task-2833014" [ 597.369533] env[68638]: _type = "Task" [ 597.369533] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.374131] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.374131] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.375155] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-041bb2d1-8cb0-457e-83e7-8a49d688dedb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.380570] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.383692] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 597.383692] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5296b90e-6beb-1308-378f-627a9eb127c0" [ 597.383692] env[68638]: _type = "Task" [ 597.383692] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.396739] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5296b90e-6beb-1308-378f-627a9eb127c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.408065] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833012, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.671418] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.676525] env[68638]: DEBUG nova.scheduler.client.report [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.689764] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833013, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06381} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.690042] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.690835] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657f747b-678f-4a64-89ad-804936b72f94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.715230] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 14772ba8-bde2-42ef-9a37-df876c8af321/14772ba8-bde2-42ef-9a37-df876c8af321.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.717680] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 597.717922] env[68638]: DEBUG 
nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.718089] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 597.718275] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.718419] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 597.718563] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 597.718774] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 597.718927] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 597.719127] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 597.719257] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 597.719429] env[68638]: DEBUG nova.virt.hardware [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 597.720371] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1953fbf9-efda-46ec-8f36-3e14a92be993 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.736880] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2202fe-1351-4a81-87ab-65610dd284da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.747684] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b507e77-25ca-4692-9a9e-467a6f661556 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.751081] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 597.751081] env[68638]: value = "task-2833015" [ 597.751081] env[68638]: _type = "Task" [ 597.751081] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.770156] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833015, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.884459] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833014, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.885574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.896298] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5296b90e-6beb-1308-378f-627a9eb127c0, 'name': SearchDatastore_Task, 'duration_secs': 0.020773} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.897353] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44429021-7dd2-4113-b1d7-5f8f57764fda {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.908215] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 597.908215] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ee25c0-3625-7dd5-7cdf-a17fa22f69f2" [ 597.908215] env[68638]: _type = "Task" [ 597.908215] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.912712] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833012, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.926013] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ee25c0-3625-7dd5-7cdf-a17fa22f69f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.042308] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.042630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.184814] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.185629] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.188703] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.374s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.190925] env[68638]: INFO nova.compute.claims [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.263750] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833015, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.339962] env[68638]: DEBUG nova.compute.manager [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received event network-vif-plugged-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 598.340251] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Acquiring lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.340486] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.340670] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.340848] env[68638]: DEBUG nova.compute.manager [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] No waiting events found dispatching network-vif-plugged-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 598.341065] env[68638]: WARNING nova.compute.manager [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received unexpected event network-vif-plugged-83c2852d-0228-4c4e-b754-0dc81d6b8a11 for instance with vm_state building and task_state spawning. 
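The three records above (acquiring the "<uuid>-events" lock, "No waiting events found dispatching network-vif-plugged-...", and the WARNING about an unexpected event) come from the compute manager's external-event bookkeeping: Neutron reports the VIF plug back to Nova, and Nova checks whether the spawning thread has already registered a waiter for that event. A minimal sketch of that bookkeeping, assuming a simplified waiter table and reusing the same "<uuid>-events" lock naming seen above (illustrative only, not Nova's actual implementation):

from oslo_concurrency import lockutils

class InstanceEventTable:
    """Tracks callbacks waiting for externally reported instance events."""

    def __init__(self):
        self._waiters = {}  # {(instance_uuid, event_name): callback}

    def register(self, instance_uuid, event_name, callback):
        with lockutils.lock(f'{instance_uuid}-events'):
            self._waiters[(instance_uuid, event_name)] = callback

    def pop_event(self, instance_uuid, event_name):
        # Same per-instance "<uuid>-events" lock as in the log records above.
        with lockutils.lock(f'{instance_uuid}-events'):
            return self._waiters.pop((instance_uuid, event_name), None)

def dispatch_external_event(table, instance_uuid, event_name):
    callback = table.pop_event(instance_uuid, event_name)
    if callback is None:
        # Nobody was waiting yet, e.g. the event raced ahead of the spawn;
        # this corresponds to the "Received unexpected event ..." WARNING above.
        print(f'Received unexpected event {event_name} for instance {instance_uuid}')
    else:
        callback()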
[ 598.341619] env[68638]: DEBUG nova.compute.manager [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 598.345019] env[68638]: DEBUG nova.compute.manager [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing instance network info cache due to event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 598.345019] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Acquiring lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.345019] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Acquired lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 598.345019] env[68638]: DEBUG nova.network.neutron [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 598.385457] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.924352} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.385702] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.385910] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.387974] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ca971e7-35d2-4cce-b799-d6350c32db4e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.395238] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 598.395238] env[68638]: value = "task-2833016" [ 598.395238] env[68638]: _type = "Task" [ 598.395238] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.409537] env[68638]: DEBUG oslo_vmware.api [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833012, 'name': PowerOnVM_Task, 'duration_secs': 1.042182} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.413010] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 598.413658] env[68638]: INFO nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Took 11.45 seconds to spawn the instance on the hypervisor. [ 598.414181] env[68638]: DEBUG nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 598.414608] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833016, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.416624] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6f4d12-a228-4f71-bc3e-ea31ce741167 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.440026] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ee25c0-3625-7dd5-7cdf-a17fa22f69f2, 'name': SearchDatastore_Task, 'duration_secs': 0.068965} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.442383] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.442914] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 423af2cc-4dea-445f-a01c-6d4d57c3f0de/423af2cc-4dea-445f-a01c-6d4d57c3f0de.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 598.443559] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f5e5698-4929-4694-adf5-af546c5d10d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.451931] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 598.451931] env[68638]: value = "task-2833017" [ 598.451931] env[68638]: _type = "Task" [ 598.451931] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.463594] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833017, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.513418] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.513721] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.696961] env[68638]: DEBUG nova.compute.utils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.705762] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.706520] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.767435] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833015, 'name': ReconfigVM_Task, 'duration_secs': 0.990613} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.768113] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 14772ba8-bde2-42ef-9a37-df876c8af321/14772ba8-bde2-42ef-9a37-df876c8af321.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 598.769012] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce128039-a5dd-4bbc-b190-891dd09d113e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.780078] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 598.780078] env[68638]: value = "task-2833018" [ 598.780078] env[68638]: _type = "Task" [ 598.780078] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.791289] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833018, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.901937] env[68638]: DEBUG nova.policy [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '677b7336f73745ba9f10d283ad41d258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e4d1720c32b4e559739d6cbc868a0f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 598.911107] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126995} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.911679] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.913714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3a8ca4-1433-44a7-9250-188181a2938b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.941892] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.944207] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-585410c1-03b4-49b8-bb0c-d6c959e897d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.973435] env[68638]: INFO nova.compute.manager [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Took 20.02 seconds to build instance. [ 598.978893] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 598.978893] env[68638]: value = "task-2833019" [ 598.978893] env[68638]: _type = "Task" [ 598.978893] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.985712] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833017, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.994838] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833019, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.015368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "be761cf1-0949-42c0-8a38-58af33113a03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.015998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.201727] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.297691] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833018, 'name': Rename_Task, 'duration_secs': 0.388646} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.298652] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.299044] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17cbea7b-75a3-4d2b-9157-2d68b40a5f3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.306627] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 599.306627] env[68638]: value = "task-2833020" [ 599.306627] env[68638]: _type = "Task" [ 599.306627] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.315654] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833020, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.442855] env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Successfully updated port: f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 599.478807] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c415f0fc-f669-4118-9a0e-8abbbf0bfe8c tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.550s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.489576] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833017, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604094} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.494770] env[68638]: DEBUG nova.network.neutron [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updated VIF entry in instance network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.495163] env[68638]: DEBUG nova.network.neutron [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updating instance_info_cache with network_info: [{"id": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "address": "fa:16:3e:27:30:e5", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c2852d-02", "ovs_interfaceid": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.500073] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore1] 
devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 423af2cc-4dea-445f-a01c-6d4d57c3f0de/423af2cc-4dea-445f-a01c-6d4d57c3f0de.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 599.500321] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 599.501895] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e7ae37c-4119-41a3-ab40-ea33bd6c1bc7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.512785] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833019, 'name': ReconfigVM_Task, 'duration_secs': 0.293468} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.512785] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfigured VM instance instance-0000000a to attach disk [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 599.512785] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 599.512785] env[68638]: value = "task-2833021" [ 599.512785] env[68638]: _type = "Task" [ 599.512785] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.513270] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd282ba0-27ff-474f-9f36-a98811fb27f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.526649] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833021, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.528670] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 599.528670] env[68638]: value = "task-2833022" [ 599.528670] env[68638]: _type = "Task" [ 599.528670] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.537654] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833022, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.628607] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.628872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.629091] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.629291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.630909] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.633637] env[68638]: INFO nova.compute.manager [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Terminating instance [ 599.733936] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18af34a-af9f-45ac-83f1-dd377979bf2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.740479] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3f81e9-f317-42f4-a584-41bbafc18c76 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.789463] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84fcf74-85c3-4546-ba7e-218185af0557 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.797853] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Successfully created port: 93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.808274] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729cf7cc-f45a-4042-9e8d-673182dc490f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.831439] env[68638]: DEBUG nova.compute.provider_tree [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.836313] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833020, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.876564] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.877354] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.949288] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.950433] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquired lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.950433] 
env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 599.991673] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.003036] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a6a937a-ca5f-4a85-92d0-6b7e1d71ea93 req-43d5b15b-ee60-4bf9-bbe8-adef378b1d03 service nova] Releasing lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.028430] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067044} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.028792] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.031024] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b0c7c0-e9c0-45c2-901b-2ad914553b96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.055652] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833022, 'name': Rename_Task, 'duration_secs': 0.145226} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.074923] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.088857] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 423af2cc-4dea-445f-a01c-6d4d57c3f0de/423af2cc-4dea-445f-a01c-6d4d57c3f0de.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.089939] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d18b0d49-6900-4ef8-8701-58ad90c74a12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.091857] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-372822f6-bf7b-4403-b04f-e47b621cc62a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.113995] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 600.113995] env[68638]: value = "task-2833024" [ 600.113995] env[68638]: _type = "Task" [ 600.113995] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.120168] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 600.120168] env[68638]: value = "task-2833023" [ 600.120168] env[68638]: _type = "Task" [ 600.120168] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.136979] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833023, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.137763] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833024, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.138150] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "refresh_cache-4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.138307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquired lock "refresh_cache-4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.138478] env[68638]: DEBUG nova.network.neutron [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.215547] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.251374] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.252313] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.252313] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.252313] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 
tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.252313] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.252313] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.252528] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.252809] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.252862] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.253035] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.253219] env[68638]: DEBUG nova.virt.hardware [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.254178] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdd34a2-911c-4797-9d8e-5ae9f415d1b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.264948] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2125ef-fe90-4a3c-a6d7-ef7eacfacec4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.315509] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833020, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.339535] env[68638]: DEBUG nova.scheduler.client.report [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.518322] env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.526890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.635689] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833024, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.645674] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833023, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.685877] env[68638]: DEBUG nova.network.neutron [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.792057] env[68638]: DEBUG nova.network.neutron [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.823210] env[68638]: DEBUG oslo_vmware.api [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833020, 'name': PowerOnVM_Task, 'duration_secs': 1.211792} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.823499] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.824687] env[68638]: INFO nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Took 11.48 seconds to spawn the instance on the hypervisor. [ 600.824687] env[68638]: DEBUG nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.824876] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e284830-5879-4229-89c3-ceeb4e28fcd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.847385] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.847385] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 600.849722] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.632s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.850378] env[68638]: DEBUG nova.objects.instance [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lazy-loading 'resources' on Instance uuid 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 600.942628] env[68638]: DEBUG nova.network.neutron [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Updating instance_info_cache with network_info: [{"id": "f3550783-066c-4341-b12e-157c8000cb63", "address": "fa:16:3e:f7:50:bf", "network": {"id": "24c39f5a-6de3-42b3-a5be-4e4241cad6e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1360563826-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0b7a2c6272941b6ae7d296d4541e9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3550783-06", "ovs_interfaceid": "f3550783-066c-4341-b12e-157c8000cb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.964820] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.965048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.131840] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 
tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833024, 'name': ReconfigVM_Task, 'duration_secs': 0.8472} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.132498] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 423af2cc-4dea-445f-a01c-6d4d57c3f0de/423af2cc-4dea-445f-a01c-6d4d57c3f0de.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 601.133308] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e9dd312-562d-43e7-9f41-5d07ecba803d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.138914] env[68638]: DEBUG oslo_vmware.api [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833023, 'name': PowerOnVM_Task, 'duration_secs': 0.96944} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.139742] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.139742] env[68638]: INFO nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Took 6.00 seconds to spawn the instance on the hypervisor. [ 601.139742] env[68638]: DEBUG nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 601.143024] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d5685c-2e28-46a7-a164-29ff6f92aa90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.146632] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 601.146632] env[68638]: value = "task-2833025" [ 601.146632] env[68638]: _type = "Task" [ 601.146632] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.162620] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833025, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.298049] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Releasing lock "refresh_cache-4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.298594] env[68638]: DEBUG nova.compute.manager [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 601.298594] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 601.299834] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e349a84e-84b3-45db-ab29-8f19380e6ddc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.308148] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 601.308756] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c45ebe28-db23-4bf7-9398-45142709f09b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.318578] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 601.318578] env[68638]: value = "task-2833026" [ 601.318578] env[68638]: _type = "Task" [ 601.318578] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.328737] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2833026, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.354339] env[68638]: INFO nova.compute.manager [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Took 21.74 seconds to build instance. 
[ 601.354339] env[68638]: DEBUG nova.compute.utils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.356509] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 601.356742] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.448530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Releasing lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.448530] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Instance network_info: |[{"id": "f3550783-066c-4341-b12e-157c8000cb63", "address": "fa:16:3e:f7:50:bf", "network": {"id": "24c39f5a-6de3-42b3-a5be-4e4241cad6e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1360563826-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0b7a2c6272941b6ae7d296d4541e9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3550783-06", "ovs_interfaceid": "f3550783-066c-4341-b12e-157c8000cb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 601.451680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:50:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3550783-066c-4341-b12e-157c8000cb63', 'vif_model': 
'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.461729] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Creating folder: Project (f0b7a2c6272941b6ae7d296d4541e9e0). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.464041] env[68638]: DEBUG nova.policy [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b433e593c2340e49eefa21c93b43f31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d127964153f4854b10dfc8f8eb0009d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.466038] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a998225-5490-4163-9aef-67829a709935 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.477205] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Created folder: Project (f0b7a2c6272941b6ae7d296d4541e9e0) in parent group-v569734. [ 601.477756] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Creating folder: Instances. Parent ref: group-v569765. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.481872] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-761ebd19-843e-489b-8466-a5c90275ee6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.492423] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Created folder: Instances in parent group-v569765. [ 601.492775] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 601.492896] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.493116] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f24fc037-6ce0-4761-abe8-f784fc6c6b7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.518912] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.518912] env[68638]: value = "task-2833029" [ 601.518912] env[68638]: _type = "Task" [ 601.518912] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.530043] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833029, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.657057] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833025, 'name': Rename_Task, 'duration_secs': 0.288437} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.657352] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.657603] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68c34aa8-aed1-4a49-9af6-00c48f998d8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.675361] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 601.675361] env[68638]: value = "task-2833030" [ 601.675361] env[68638]: _type = "Task" [ 601.675361] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.678681] env[68638]: INFO nova.compute.manager [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Took 19.01 seconds to build instance. [ 601.684575] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833030, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.768579] env[68638]: DEBUG nova.compute.manager [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Received event network-vif-plugged-f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 601.768813] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Acquiring lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.769187] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.769375] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.769557] env[68638]: DEBUG nova.compute.manager [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] No waiting events found dispatching network-vif-plugged-f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 601.769720] env[68638]: WARNING nova.compute.manager [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Received unexpected event network-vif-plugged-f3550783-066c-4341-b12e-157c8000cb63 for instance with vm_state building and task_state spawning. [ 601.769872] env[68638]: DEBUG nova.compute.manager [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Received event network-changed-f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 601.770082] env[68638]: DEBUG nova.compute.manager [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Refreshing instance network info cache due to event network-changed-f3550783-066c-4341-b12e-157c8000cb63. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 601.770295] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Acquiring lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.770431] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Acquired lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.771343] env[68638]: DEBUG nova.network.neutron [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Refreshing network info cache for port f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.833603] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2833026, 'name': PowerOffVM_Task, 'duration_secs': 0.194851} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.833911] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 601.833975] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 601.834274] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52204efb-b620-41f4-8997-242778b202a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.857667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ecf1595e-2465-45b9-ab35-bdc0297e9c66 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.265s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.858236] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 601.868569] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 601.868569] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 601.868569] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleting the datastore file [datastore1] 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.872024] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a7b4eb1-2ee5-4280-881a-344747a01f1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.872551] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa2f6da-0abc-4da2-ae77-bb0703b2a165 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.884149] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8d1eba-3c66-4d25-b8a8-1f7f6543e61a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.888679] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for the task: (returnval){ [ 601.888679] env[68638]: value = "task-2833032" [ 601.888679] env[68638]: _type = "Task" [ 601.888679] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.926529] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1303eafd-0677-46b2-b96c-a714f9676e5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.933015] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2833032, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.938482] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d0070b-b22a-45e2-8a93-800a8b82c5b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.953830] env[68638]: DEBUG nova.compute.provider_tree [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.971458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.971696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.029913] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833029, 'name': CreateVM_Task, 'duration_secs': 0.38181} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.030348] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 602.031110] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.031384] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.031800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 602.032105] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd2f13b-c6ea-4ac0-b128-14d30754d383 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.040656] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 602.040656] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522b309c-6cce-9800-ff6b-ec0b8dc37f40" [ 602.040656] env[68638]: _type = "Task" [ 602.040656] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.046991] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522b309c-6cce-9800-ff6b-ec0b8dc37f40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.132849] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Successfully created port: 173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.184276] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833030, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.185951] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba96e130-8c93-4d70-aa70-997550e9113a tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.529s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.368886] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.401159] env[68638]: DEBUG oslo_vmware.api [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Task: {'id': task-2833032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285282} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.401477] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 602.401668] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 602.401855] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 602.402044] env[68638]: INFO nova.compute.manager [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Took 1.10 seconds to destroy the instance on the hypervisor. [ 602.402296] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.402491] env[68638]: DEBUG nova.compute.manager [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 602.403567] env[68638]: DEBUG nova.network.neutron [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 602.447737] env[68638]: DEBUG nova.network.neutron [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.458955] env[68638]: DEBUG nova.scheduler.client.report [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.549163] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522b309c-6cce-9800-ff6b-ec0b8dc37f40, 'name': SearchDatastore_Task, 'duration_secs': 0.02305} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.550158] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.550158] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.550158] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.550158] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.550311] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.550508] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c9cc9d7-d90c-4185-bb1a-89d6cfb6a336 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.560695] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.560695] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.560979] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89580502-3ece-46e9-acbd-77eef46697d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.566952] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 602.566952] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae7023-cab4-675c-2bff-1a65229fceba" [ 602.566952] env[68638]: _type = "Task" [ 602.566952] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.579441] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ae7023-cab4-675c-2bff-1a65229fceba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.598592] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Successfully updated port: 93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.684787] env[68638]: DEBUG oslo_vmware.api [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833030, 'name': PowerOnVM_Task, 'duration_secs': 0.735813} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.685199] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.685508] env[68638]: INFO nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 602.685704] env[68638]: DEBUG nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 602.689036] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aee86c2-8381-4133-bfb0-4a929a6c2582 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.690555] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.881312] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 602.901793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.916767] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:28:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1227569125',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-803575089',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 602.917708] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 602.917708] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 602.917708] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 602.917708] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 602.917868] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 602.918010] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 602.918201] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 602.918699] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 602.918699] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 602.918699] env[68638]: DEBUG nova.virt.hardware [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 602.919672] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d49830-3b26-4662-b47a-b34e736c782c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.929318] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a62e86-780f-4199-a827-3a1a32d74369 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.950527] env[68638]: DEBUG 
nova.network.neutron [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.964415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.115s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.967124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.568s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.968878] env[68638]: INFO nova.compute.claims [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.991396] env[68638]: INFO nova.scheduler.client.report [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Deleted allocations for instance 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944 [ 603.082662] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ae7023-cab4-675c-2bff-1a65229fceba, 'name': SearchDatastore_Task, 'duration_secs': 0.011542} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.082897] env[68638]: DEBUG nova.network.neutron [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Updated VIF entry in instance network info cache for port f3550783-066c-4341-b12e-157c8000cb63. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.083278] env[68638]: DEBUG nova.network.neutron [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Updating instance_info_cache with network_info: [{"id": "f3550783-066c-4341-b12e-157c8000cb63", "address": "fa:16:3e:f7:50:bf", "network": {"id": "24c39f5a-6de3-42b3-a5be-4e4241cad6e3", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1360563826-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0b7a2c6272941b6ae7d296d4541e9e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3550783-06", "ovs_interfaceid": "f3550783-066c-4341-b12e-157c8000cb63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.085635] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c1e402e-968a-4c99-9e0a-ac8f9cf30cd8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.092878] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 603.092878] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e3b471-26fe-4aad-22fc-d7b133fe0a06" [ 603.092878] env[68638]: _type = "Task" [ 603.092878] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.107596] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.107753] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquired lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.107926] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.109379] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e3b471-26fe-4aad-22fc-d7b133fe0a06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.216195] env[68638]: INFO nova.compute.manager [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Took 22.40 seconds to build instance. [ 603.225309] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.422187] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.422494] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.453958] env[68638]: INFO nova.compute.manager [-] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Took 1.05 seconds to deallocate network for instance. 
[ 603.499616] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c62d0d4f-92a3-4893-bead-36a23f326705 tempest-DeleteServersAdminTestJSON-610272399 tempest-DeleteServersAdminTestJSON-610272399-project-admin] Lock "05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.832s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.587764] env[68638]: DEBUG oslo_concurrency.lockutils [req-56cc5c29-2269-42f0-8998-705d41885757 req-755c034b-d4d8-40bb-bbed-8dd7bff83b7e service nova] Releasing lock "refresh_cache-a5e993de-7aad-4b34-8946-563dc69a6f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.613790] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e3b471-26fe-4aad-22fc-d7b133fe0a06, 'name': SearchDatastore_Task, 'duration_secs': 0.011875} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.613790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.614042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a5e993de-7aad-4b34-8946-563dc69a6f25/a5e993de-7aad-4b34-8946-563dc69a6f25.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 603.614559] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ac902c5-4541-41c3-aa2e-28384c1ed65f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.624620] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 603.624620] env[68638]: value = "task-2833033" [ 603.624620] env[68638]: _type = "Task" [ 603.624620] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.633021] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833033, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.673545] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.719098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c44b474e-01e1-49c8-a2d9-c80193b589bd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.912s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.964300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.147105] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833033, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.221519] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.331558] env[68638]: DEBUG nova.network.neutron [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating instance_info_cache with network_info: [{"id": "93a70134-a907-41d4-bce1-2bf7496b23bb", "address": "fa:16:3e:d9:a0:eb", "network": {"id": "09a58246-fcd1-46a7-b760-d6ab4d363cd6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1998833156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e4d1720c32b4e559739d6cbc868a0f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a70134-a9", "ovs_interfaceid": "93a70134-a907-41d4-bce1-2bf7496b23bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.442990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354b8ed9-a394-4304-acca-c55eeb97973d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.452021] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d038189-4e05-4097-aea2-2869d9878add {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.485707] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d294a69-9577-4493-9ca7-aa70f745d6fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.496956] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694adc72-a89b-4ada-91f5-9a5da5be685c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.511556] env[68638]: DEBUG nova.compute.provider_tree [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.637366] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833033, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613841} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.637683] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a5e993de-7aad-4b34-8946-563dc69a6f25/a5e993de-7aad-4b34-8946-563dc69a6f25.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 604.637775] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 604.638489] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-194ea919-3282-48bb-a9b8-c4b98328d2e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.644316] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 604.644316] env[68638]: value = "task-2833034" [ 604.644316] env[68638]: _type = "Task" [ 604.644316] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.652585] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833034, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.747748] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.836023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Releasing lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.836023] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Instance network_info: |[{"id": "93a70134-a907-41d4-bce1-2bf7496b23bb", "address": "fa:16:3e:d9:a0:eb", "network": {"id": "09a58246-fcd1-46a7-b760-d6ab4d363cd6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1998833156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e4d1720c32b4e559739d6cbc868a0f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a70134-a9", "ovs_interfaceid": "93a70134-a907-41d4-bce1-2bf7496b23bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 604.836265] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:a0:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b5f9472-1844-4c99-8804-8f193cfff562', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93a70134-a907-41d4-bce1-2bf7496b23bb', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.846605] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Creating folder: Project (1e4d1720c32b4e559739d6cbc868a0f3). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.847069] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e31c8821-ae7d-4f3e-a405-a44cb69402e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.860478] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Created folder: Project (1e4d1720c32b4e559739d6cbc868a0f3) in parent group-v569734. [ 604.860478] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Creating folder: Instances. Parent ref: group-v569768. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.860478] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6da1176b-0675-4839-8d25-d9c7b01fe0e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.867821] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Created folder: Instances in parent group-v569768. [ 604.868108] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.868311] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.868520] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-339b582e-2623-415b-babb-0de72e5665db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.895115] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.895115] env[68638]: value = "task-2833037" [ 604.895115] env[68638]: _type = "Task" [ 604.895115] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.909504] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833037, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.015124] env[68638]: DEBUG nova.scheduler.client.report [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.104132] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Successfully updated port: 173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.156774] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833034, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.208353} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.157091] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 605.157950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ee26da-9471-4712-8259-c8409fbcd634 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.182811] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] a5e993de-7aad-4b34-8946-563dc69a6f25/a5e993de-7aad-4b34-8946-563dc69a6f25.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.183179] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b488ab7a-181c-4a4a-b7ec-db661d6bb3cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.204819] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 605.204819] env[68638]: value = "task-2833038" [ 605.204819] env[68638]: _type = "Task" [ 605.204819] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.215903] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.254981] env[68638]: DEBUG nova.compute.manager [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Received event network-vif-plugged-93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 605.254981] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.255306] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.255527] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.255703] env[68638]: DEBUG nova.compute.manager [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] No waiting events found dispatching network-vif-plugged-93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 605.256475] env[68638]: WARNING nova.compute.manager [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Received unexpected event network-vif-plugged-93a70134-a907-41d4-bce1-2bf7496b23bb for instance with vm_state building and task_state spawning. [ 605.256475] env[68638]: DEBUG nova.compute.manager [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Received event network-changed-93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 605.256475] env[68638]: DEBUG nova.compute.manager [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Refreshing instance network info cache due to event network-changed-93a70134-a907-41d4-bce1-2bf7496b23bb. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 605.256475] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Acquiring lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.256646] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Acquired lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.257174] env[68638]: DEBUG nova.network.neutron [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Refreshing network info cache for port 93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 605.363397] env[68638]: INFO nova.compute.manager [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Rebuilding instance [ 605.413415] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833037, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.449722] env[68638]: DEBUG nova.compute.manager [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 605.450939] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8443608f-53e6-4892-920e-177ed7955d86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.527974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.527974] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.537069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.110s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.543713] env[68638]: INFO nova.compute.claims [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.614737] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.614891] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.615060] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.721171] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833038, 'name': ReconfigVM_Task, 'duration_secs': 0.400429} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.722495] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Reconfigured VM instance instance-0000000b to attach disk [datastore1] a5e993de-7aad-4b34-8946-563dc69a6f25/a5e993de-7aad-4b34-8946-563dc69a6f25.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 605.722779] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3496d06b-1ef7-4b13-bcc4-a43f9ea7b38e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.732502] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 605.732502] env[68638]: value = "task-2833039" [ 605.732502] env[68638]: _type = "Task" [ 605.732502] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.752951] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833039, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.913284] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833037, 'name': CreateVM_Task, 'duration_secs': 0.574762} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.913465] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 605.914560] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.914780] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.915412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 605.921230] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48045bb2-c3a6-4d19-9fdf-0bddfc7c8c46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.925606] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 605.925606] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525def00-7ea2-6731-ac44-1e385e0aa80b" [ 605.925606] env[68638]: _type = "Task" [ 605.925606] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.935615] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525def00-7ea2-6731-ac44-1e385e0aa80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.039207] env[68638]: DEBUG nova.compute.utils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 606.044196] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 606.044546] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 606.167287] env[68638]: DEBUG nova.compute.manager [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Received event network-vif-plugged-173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 606.167616] env[68638]: DEBUG oslo_concurrency.lockutils [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] Acquiring lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.167812] env[68638]: DEBUG oslo_concurrency.lockutils [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.167981] env[68638]: DEBUG oslo_concurrency.lockutils [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.168161] env[68638]: DEBUG nova.compute.manager [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] No waiting events found dispatching network-vif-plugged-173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 606.168335] env[68638]: WARNING nova.compute.manager [req-f19bc4d9-9f39-46d5-8184-9192622e3c63 req-efdd47b5-e179-419f-b009-4199d41aa994 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Received unexpected event network-vif-plugged-173c13c1-a5ec-4a6b-98d8-e039626a047e for instance with vm_state building and task_state spawning. 
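The network-vif-plugged handling above follows Nova's external instance event pattern: the compute manager can register a waiter for an event, and a Neutron notification either completes that waiter or is logged as unexpected when nothing is waiting for it, as happens here while the instance is still building. The sketch below is an illustrative approximation of that pattern, not Nova's actual InstanceEvents code; the class and method names are borrowed from the log entries only for readability.

# Illustrative sketch (not Nova's implementation) of the external
# instance event pattern visible above: register an expected
# network-vif-plugged event, then either complete the waiter when the
# notification arrives or warn that the event was unexpected.
import threading


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING above: the event arrived before (or without)
        # anyone waiting on it, e.g. while vm_state is still "building".
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()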
[ 606.207808] env[68638]: DEBUG nova.policy [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d2be8827bf74d62b1f6245cdcfd9d8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3b1af81bc4cec877ef5a7e6999a7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 606.240749] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.249328] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833039, 'name': Rename_Task, 'duration_secs': 0.211769} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.249629] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 606.249885] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70fb10f2-114d-4eba-b081-dd41a83e0488 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.258906] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 606.258906] env[68638]: value = "task-2833040" [ 606.258906] env[68638]: _type = "Task" [ 606.258906] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.270141] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.312013] env[68638]: DEBUG nova.network.neutron [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updated VIF entry in instance network info cache for port 93a70134-a907-41d4-bce1-2bf7496b23bb. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 606.312013] env[68638]: DEBUG nova.network.neutron [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating instance_info_cache with network_info: [{"id": "93a70134-a907-41d4-bce1-2bf7496b23bb", "address": "fa:16:3e:d9:a0:eb", "network": {"id": "09a58246-fcd1-46a7-b760-d6ab4d363cd6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1998833156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e4d1720c32b4e559739d6cbc868a0f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a70134-a9", "ovs_interfaceid": "93a70134-a907-41d4-bce1-2bf7496b23bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.443811] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525def00-7ea2-6731-ac44-1e385e0aa80b, 'name': SearchDatastore_Task, 'duration_secs': 0.015391} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.443811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.444117] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.444173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.444324] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.444759] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 606.444840] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85fd3da2-f7b5-4841-a23b-f9490e5f0b6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.462348] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 606.462563] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 606.463345] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d87a9dbf-8b41-4e8b-aecc-9143feb63fc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.469201] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 606.469201] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5221c961-b7f8-5ae9-58ca-01ff6ec4482a" [ 606.469201] env[68638]: _type = "Task" [ 606.469201] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.473876] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.474143] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61d0a11b-ee58-4cc4-a343-bf47eeef4548 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.479623] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221c961-b7f8-5ae9-58ca-01ff6ec4482a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.486171] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 606.486171] env[68638]: value = "task-2833041" [ 606.486171] env[68638]: _type = "Task" [ 606.486171] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.498052] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.554513] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.774300] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.817482] env[68638]: DEBUG oslo_concurrency.lockutils [req-512c2af1-b1ab-4a21-9dcc-cc3deef71275 req-7324fe1f-dc5d-4ba7-ac55-062d629e5d32 service nova] Releasing lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.942697] env[68638]: DEBUG nova.network.neutron [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updating instance_info_cache with network_info: [{"id": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "address": "fa:16:3e:df:e0:03", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173c13c1-a5", "ovs_interfaceid": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.986778] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221c961-b7f8-5ae9-58ca-01ff6ec4482a, 'name': SearchDatastore_Task, 'duration_secs': 0.044523} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.987186] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70815585-d44c-4620-a85b-c7b7c7f4edbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.000377] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 607.000377] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52977a9d-6ce5-f2ae-3c9c-1a5e6d974cc7" [ 607.000377] env[68638]: _type = "Task" [ 607.000377] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.003902] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.015134] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52977a9d-6ce5-f2ae-3c9c-1a5e6d974cc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.065766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904d84a8-906f-4bd5-bc08-afcf5c00908e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.076087] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b19d6d-63a6-4256-b4ef-a5ae7cc78d10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.116833] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf628fc-62a4-4e60-ba8c-d40b5bbcb19c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.126268] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f59d1f-01bf-4684-8b86-7ba14ce1d2f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.141792] env[68638]: DEBUG nova.compute.provider_tree [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.271050] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.447459] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.450038] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Instance network_info: |[{"id": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "address": "fa:16:3e:df:e0:03", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173c13c1-a5", "ovs_interfaceid": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 607.450314] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:e0:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffcecdaa-a7b8-49fc-9371-dbdb7744688e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '173c13c1-a5ec-4a6b-98d8-e039626a047e', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.457723] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Creating folder: Project (6d127964153f4854b10dfc8f8eb0009d). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.457825] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a3275f2-525a-4aba-9ec5-e57b1157c14b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.469819] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Created folder: Project (6d127964153f4854b10dfc8f8eb0009d) in parent group-v569734. [ 607.470078] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Creating folder: Instances. Parent ref: group-v569771. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.470434] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51f56a49-859e-4333-98bc-f4f90a96a79c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.482728] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Created folder: Instances in parent group-v569771. [ 607.482728] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 607.482912] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.483310] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c40aaed2-0a9c-416b-9645-b67ee30675b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.506621] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833041, 'name': PowerOffVM_Task, 'duration_secs': 0.684059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.510328] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 607.510439] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 607.510711] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.510711] env[68638]: value = "task-2833044" [ 607.510711] env[68638]: _type = "Task" [ 607.510711] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.511679] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a511efa6-b790-41de-946e-e08a3092361d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.523236] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52977a9d-6ce5-f2ae-3c9c-1a5e6d974cc7, 'name': SearchDatastore_Task, 'duration_secs': 0.035416} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.523889] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.524270] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac/ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 607.524391] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff11a6b7-c5a7-4394-bbeb-cdeddbc49240 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.532471] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 607.532631] env[68638]: DEBUG oslo_vmware.api [-] 
Task: {'id': task-2833044, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.533119] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94f19a2e-1f92-4c6d-a0cd-644d30209932 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.539094] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 607.539094] env[68638]: value = "task-2833045" [ 607.539094] env[68638]: _type = "Task" [ 607.539094] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.548382] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.564658] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 607.564917] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 607.565140] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Deleting the datastore file [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 607.565428] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50cbe16b-7c29-42d6-9d56-391a5fc0a10c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.574677] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.577318] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 607.577318] env[68638]: value = "task-2833047" [ 607.577318] env[68638]: _type = "Task" [ 607.577318] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.586891] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.609690] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 607.610191] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.610191] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 607.610435] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.610859] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 607.611145] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 607.611450] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
607.611687] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 607.611950] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 607.612179] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 607.612466] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 607.613570] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa806059-d80f-48df-ac1c-e908d24c8a0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.624411] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca48ef12-81f6-4ae8-8379-9457b5a0ae10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.647029] env[68638]: DEBUG nova.scheduler.client.report [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.778921] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.900235] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Successfully created port: 951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.951471] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "4eb4360a-46a8-440b-b300-4724c3497ff2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.951610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.029420] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833044, 'name': CreateVM_Task, 'duration_secs': 0.498294} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.029420] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.030314] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.030964] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.030964] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 608.031094] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5af2d646-f72d-4ff4-a2a6-4dcc362918f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.039070] env[68638]: DEBUG oslo_vmware.api [None 
req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 608.039070] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b561fc-72e1-6619-03d3-668f8eb3233c" [ 608.039070] env[68638]: _type = "Task" [ 608.039070] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.059679] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833045, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.060043] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b561fc-72e1-6619-03d3-668f8eb3233c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.090033] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11243} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.090502] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 608.090502] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 608.090655] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 608.153881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.154433] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.158347] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.692s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.163848] env[68638]: INFO nova.compute.claims [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.275832] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.561886] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b561fc-72e1-6619-03d3-668f8eb3233c, 'name': SearchDatastore_Task, 'duration_secs': 0.016404} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.568101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.568473] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.568778] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.568982] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.569261] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.570668] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54969} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.570668] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f69673fc-5eb4-4ebc-9a4f-85603a3b811a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.573321] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac/ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 608.573541] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 608.573861] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b910040d-6b08-48c5-8bc4-b7ac94ecc9a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.581636] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 608.581636] env[68638]: value = "task-2833048" [ 608.581636] env[68638]: _type = "Task" [ 608.581636] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.586231] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.586231] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.593249] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01940ede-5faa-4607-b590-021a2dae1f19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.607359] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.609456] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 608.609456] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523db0d8-bed3-bd8f-d7c0-f777c68bb6ed" [ 608.609456] env[68638]: _type = "Task" [ 608.609456] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.623427] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523db0d8-bed3-bd8f-d7c0-f777c68bb6ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.642219] env[68638]: DEBUG nova.compute.manager [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-changed-316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 608.642219] env[68638]: DEBUG nova.compute.manager [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing instance network info cache due to event network-changed-316407a1-ab13-4bd4-98ef-7e090d54399c. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 608.643088] env[68638]: DEBUG oslo_concurrency.lockutils [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.643425] env[68638]: DEBUG oslo_concurrency.lockutils [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.643739] env[68638]: DEBUG nova.network.neutron [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing network info cache for port 316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 608.669130] env[68638]: DEBUG nova.compute.utils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 608.679966] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.679966] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.775640] env[68638]: DEBUG oslo_vmware.api [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833040, 'name': PowerOnVM_Task, 'duration_secs': 2.197972} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.775640] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 608.775640] env[68638]: INFO nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Took 11.10 seconds to spawn the instance on the hypervisor. 
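The surrounding records ("Task: {'id': task-2833040, 'name': PowerOnVM_Task} progress is 66%." followed later by "completed successfully") are produced by a poll-until-terminal loop around each vCenter task. The sketch below is only an illustration of that polling pattern under stated assumptions: FakeTask and wait_for_task are hypothetical stand-ins written for this example, not the actual oslo.vmware API or Nova code.

import time

class FakeTask:
    """Hypothetical stand-in for a vCenter task handle used only in this sketch."""
    def __init__(self, steps=3):
        self._progress = 0
        self._steps = steps

    def poll(self):
        # Advance a fake progress counter; a real task would report its own
        # state (e.g. running/success/error) and percentage from the server.
        self._progress = min(100, self._progress + 100 // self._steps)
        state = "success" if self._progress >= 100 else "running"
        return state, self._progress

def wait_for_task(task, poll_interval=0.5, timeout=60.0):
    """Poll a task at a fixed interval until it reaches a terminal state,
    mirroring the 'progress is N%' / 'completed successfully' log lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = task.poll()
        print(f"Task progress is {progress}%.")
        if state == "success":
            print("Task completed successfully.")
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    wait_for_task(FakeTask())

Running the sketch prints a progress line per poll and a final success line, the same shape as the PowerOnVM_Task and CopyVirtualDisk_Task entries in this log; the real driver additionally records the task duration (duration_secs) once the task completes.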
[ 608.775640] env[68638]: DEBUG nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.776119] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d269d484-6e9c-4909-b3f3-d96c1e076a90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.804200] env[68638]: DEBUG nova.policy [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d2be8827bf74d62b1f6245cdcfd9d8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3b1af81bc4cec877ef5a7e6999a7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 608.841116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.841373] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.109785] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07298} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.109785] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.110658] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7203e259-4c2c-44e7-829f-70f327d1d25d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.129527] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523db0d8-bed3-bd8f-d7c0-f777c68bb6ed, 'name': SearchDatastore_Task, 'duration_secs': 0.014002} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.151367] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac/ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.151658] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c31ab791-3bfc-4b6b-9ed5-3636bccebd61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.157341] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a6ac25d-4e66-4b86-a55d-6e9edd8c9be8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.175745] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 609.178948] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 
tempest-ServersAdmin275Test-1032667631-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.178948] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 609.178948] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.178948] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 609.178948] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 609.179480] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 609.179480] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 609.179480] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 609.179480] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 609.179480] env[68638]: DEBUG nova.virt.hardware [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 609.179705] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8592e2cd-9a3c-49d2-ba97-008e374051de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.182935] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 
tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.191888] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 609.191888] env[68638]: value = "task-2833049" [ 609.191888] env[68638]: _type = "Task" [ 609.191888] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.192414] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 609.192414] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526d6cf6-deb4-f7a8-02ef-1d099d5b2973" [ 609.192414] env[68638]: _type = "Task" [ 609.192414] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.202990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b330a2-6113-49ae-86f9-2fa08e6546d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.213524] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833049, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.214616] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526d6cf6-deb4-f7a8-02ef-1d099d5b2973, 'name': SearchDatastore_Task, 'duration_secs': 0.010838} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.214616] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.215646] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4/4b5c5b9e-389d-4ed9-a860-bd41a33fbac4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.215646] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92b57bd4-8266-426e-ab0e-a045eb54ca09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.226779] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.237030] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 609.241294] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 609.241731] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00f7f831-31f8-4d9d-bf95-4495bf3c6cf4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.255909] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 609.255909] env[68638]: value = "task-2833050" [ 609.255909] env[68638]: _type = "Task" [ 609.255909] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.263322] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.263322] env[68638]: value = "task-2833051" [ 609.263322] env[68638]: _type = "Task" [ 609.263322] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.270424] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.275543] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833051, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.303802] env[68638]: INFO nova.compute.manager [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Took 26.23 seconds to build instance. [ 609.441784] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.443139] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.443139] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.443139] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.443139] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.446675] env[68638]: INFO nova.compute.manager [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Terminating instance [ 609.474578] env[68638]: 
DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Received event network-changed-d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 609.475268] env[68638]: DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Refreshing instance network info cache due to event network-changed-d9c26596-0dec-45f8-9efd-781be344a670. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 609.475268] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquiring lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.475455] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquired lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.475705] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Refreshing network info cache for port d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.559143] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Successfully created port: 5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.713326] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833049, 'name': ReconfigVM_Task, 'duration_secs': 0.376704} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.717107] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfigured VM instance instance-0000000c to attach disk [datastore2] ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac/ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.718814] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94e487d0-18b5-4d47-8734-b6e2ded21478 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.725671] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 609.725671] env[68638]: value = "task-2833052" [ 609.725671] env[68638]: _type = "Task" [ 609.725671] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.735370] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833052, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.736955] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a542e99c-3ede-4f6d-b03b-087caf38ab73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.746769] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1beab0-e3d2-4075-9295-5f3cbbe01eec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.786432] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0ddaf4-9964-4bde-ba32-8c8a941482b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.794825] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833050, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.799933] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833051, 'name': CreateVM_Task, 'duration_secs': 0.337974} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.800184] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.801576] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54710fae-a5c8-4577-b1e6-a758c23e6055 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.805382] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.805539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.805966] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.806524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9d87afe1-1a33-470d-a879-bd31aa8e16da tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.754s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.806753] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecdd1996-ce8d-4edc-b352-666cb1531957 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.814553] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 609.814553] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52359b4e-fc51-3d27-b62a-a370af685729" [ 609.814553] env[68638]: _type = "Task" [ 609.814553] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.823485] env[68638]: DEBUG nova.compute.provider_tree [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.838330] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52359b4e-fc51-3d27-b62a-a370af685729, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.957938] env[68638]: DEBUG nova.compute.manager [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 609.958471] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.960275] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9826a12f-db9a-46f8-8e90-b5ba89d4fc11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.972317] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 609.976371] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c14cfb1-04bd-429a-b4f3-611a3503d73a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.987046] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 609.987046] env[68638]: value = "task-2833053" [ 609.987046] env[68638]: _type = "Task" [ 609.987046] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.990589] env[68638]: DEBUG nova.network.neutron [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updated VIF entry in instance network info cache for port 316407a1-ab13-4bd4-98ef-7e090d54399c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 609.991140] env[68638]: DEBUG nova.network.neutron [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.003761] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2833053, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.135695] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updated VIF entry in instance network info cache for port d9c26596-0dec-45f8-9efd-781be344a670. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.136081] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updating instance_info_cache with network_info: [{"id": "d9c26596-0dec-45f8-9efd-781be344a670", "address": "fa:16:3e:89:87:69", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9c26596-0d", "ovs_interfaceid": "d9c26596-0dec-45f8-9efd-781be344a670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.201985] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.238923] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833052, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.245080] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.245080] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.245080] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.245326] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.245511] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.245511] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.245706] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.245880] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.246026] env[68638]: DEBUG nova.virt.hardware [None 
req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.246213] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.246387] env[68638]: DEBUG nova.virt.hardware [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.247370] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09176b33-5dee-4be8-9253-748c0e87f3f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.255712] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45cfd98-8f18-4da7-9fba-ac5d83e25ad2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.290635] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588145} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.290635] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4/4b5c5b9e-389d-4ed9-a860-bd41a33fbac4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 610.290635] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 610.290635] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e5ad74d-7cd3-4fd1-ac74-cfc4f68899d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.301583] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 610.301583] env[68638]: value = "task-2833054" [ 610.301583] env[68638]: _type = "Task" [ 610.301583] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.312989] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.317009] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.332643] env[68638]: DEBUG nova.scheduler.client.report [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.340437] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52359b4e-fc51-3d27-b62a-a370af685729, 'name': SearchDatastore_Task, 'duration_secs': 0.01997} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.341096] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.341459] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.341971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.341971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.342116] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.342359] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b99d6414-a41b-4f37-8be7-0b9dca75e307 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.351409] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.353152] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.353957] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0c4259-c177-4db1-b654-02623486598d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.362559] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 610.362559] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529394ed-b35a-a88e-a35b-3802cdc2a61c" [ 610.362559] env[68638]: _type = "Task" [ 610.362559] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.370163] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529394ed-b35a-a88e-a35b-3802cdc2a61c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.497372] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2833053, 'name': PowerOffVM_Task, 'duration_secs': 0.336478} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.497642] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 610.497799] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 610.498293] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15a1e991-8a9c-45f6-baea-bf86fc7a535c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.500768] env[68638]: DEBUG oslo_concurrency.lockutils [req-83f2820a-a9b3-4e5d-bb0b-3584e1f0debe req-d6a13169-6732-43c7-a930-9a392155e7d7 service nova] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.618491] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 610.620897] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 
tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 610.621392] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Deleting the datastore file [datastore1] 54af9c38-c8b6-4ef9-be63-de545dcc0da5 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 610.621671] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f2afe65-c466-42ee-8648-ac4d74a3d01b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.629428] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for the task: (returnval){ [ 610.629428] env[68638]: value = "task-2833056" [ 610.629428] env[68638]: _type = "Task" [ 610.629428] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.644617] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Releasing lock "refresh_cache-c80895d5-1a59-4779-9da9-9aeec10bc395" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.644617] env[68638]: DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Received event network-changed-173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 610.644617] env[68638]: DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Refreshing instance network info cache due to event network-changed-173c13c1-a5ec-4a6b-98d8-e039626a047e. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 610.644861] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquiring lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.645091] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquired lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.645185] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Refreshing network info cache for port 173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.648021] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2833056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.738895] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833052, 'name': Rename_Task, 'duration_secs': 0.973316} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.739245] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 610.739989] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-778d5c51-6a39-4fd2-b035-e802387b5d93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.748629] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 610.748629] env[68638]: value = "task-2833057" [ 610.748629] env[68638]: _type = "Task" [ 610.748629] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.760985] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833057, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.763393] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Successfully updated port: 951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 610.811610] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.339663} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.811888] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 610.812745] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b565f832-5021-4aa5-9b62-9c42947db420 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.823067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.823313] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.847134] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4/4b5c5b9e-389d-4ed9-a860-bd41a33fbac4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 610.851758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.852347] env[68638]: DEBUG nova.compute.manager [None 
req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.854974] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0e4cea2-556f-4dce-a356-14d595c8a482 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.871433] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.986s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.883051] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529394ed-b35a-a88e-a35b-3802cdc2a61c, 'name': SearchDatastore_Task, 'duration_secs': 0.00867} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.885048] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 610.885048] env[68638]: value = "task-2833058" [ 610.885048] env[68638]: _type = "Task" [ 610.885048] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.885253] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d72d93-dce9-430e-a101-cc7ed4133cee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.892456] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.894413] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 610.894413] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cd7d00-71b8-551f-141b-e76e98ae2114" [ 610.894413] env[68638]: _type = "Task" [ 610.894413] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.898487] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833058, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.908469] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cd7d00-71b8-551f-141b-e76e98ae2114, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.147469] env[68638]: DEBUG oslo_vmware.api [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Task: {'id': task-2833056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335617} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.148348] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 611.149151] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 611.149475] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 611.151207] env[68638]: INFO nova.compute.manager [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Took 1.19 seconds to destroy the instance on the hypervisor. [ 611.151207] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.153717] env[68638]: DEBUG nova.compute.manager [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 611.155396] env[68638]: DEBUG nova.network.neutron [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 611.262584] env[68638]: DEBUG oslo_vmware.api [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833057, 'name': PowerOnVM_Task, 'duration_secs': 0.505257} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.263025] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 611.263231] env[68638]: INFO nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Took 11.05 seconds to spawn the instance on the hypervisor. [ 611.263406] env[68638]: DEBUG nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 611.266333] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f81361-2caa-4419-9319-c6cb2379c279 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.269591] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.269591] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.269591] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.372972] env[68638]: DEBUG nova.compute.utils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.375633] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 611.376058] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.382795] env[68638]: INFO nova.compute.claims [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.403367] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833058, 'name': ReconfigVM_Task, 'duration_secs': 0.328929} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.406424] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4/4b5c5b9e-389d-4ed9-a860-bd41a33fbac4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 611.407321] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5b9b2b8-928f-4378-a4ce-ce3d7f1612d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.417119] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cd7d00-71b8-551f-141b-e76e98ae2114, 'name': SearchDatastore_Task, 'duration_secs': 0.0153} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.418243] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.418535] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.419259] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 611.419259] env[68638]: value = "task-2833059" [ 611.419259] env[68638]: _type = "Task" [ 611.419259] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.419259] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5ca2839-95d6-4800-8159-f60dff129361 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.430315] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833059, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.431960] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 611.431960] env[68638]: value = "task-2833060" [ 611.431960] env[68638]: _type = "Task" [ 611.431960] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.440515] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833060, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.564200] env[68638]: DEBUG nova.policy [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1858fd67960a4a5eae88eb42f0a8eec6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2c09879d5e1431caea12a0ae614a0a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 611.586153] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updated VIF entry in instance network info cache for port 173c13c1-a5ec-4a6b-98d8-e039626a047e. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.586522] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updating instance_info_cache with network_info: [{"id": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "address": "fa:16:3e:df:e0:03", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173c13c1-a5", "ovs_interfaceid": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.788855] env[68638]: INFO nova.compute.manager [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Took 27.38 seconds to build instance. 
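The repeated "Waiting for the task ... to complete" and "progress is N%" entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that calling pattern, assuming oslo.vmware is installed; the host, credentials, and vm_ref below are placeholders for illustration, not values taken from this log:

    from oslo_vmware import api

    # Placeholder connection details (the real driver reads these from its configuration).
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # how often the task is polled and progress logged

    def power_on(session, vm_ref):
        # Kick off the vCenter task, then block until it finishes.
        # wait_for_task() polls the task object, logging "progress is N%"
        # along the way, and raises if the task ends in an error state.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)
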
[ 611.838402] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.838662] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.870206] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.875515] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.890510] env[68638]: INFO nova.compute.resource_tracker [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating resource usage from migration a046c66a-9a56-4cc5-8a1c-ed2bc6ccbe27 [ 611.947781] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833059, 'name': Rename_Task, 'duration_secs': 0.169323} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.949129] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.950035] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-321d6186-de36-4a0b-9207-61ba1c3b671c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.958259] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833060, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.965190] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 611.965190] env[68638]: value = "task-2833061" [ 611.965190] env[68638]: _type = "Task" [ 611.965190] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.978460] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.094342] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Releasing lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.094625] env[68638]: DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 612.094847] env[68638]: DEBUG nova.compute.manager [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing instance network info cache due to event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 612.095150] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquiring lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.095310] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Acquired lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.095478] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.180021] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Successfully updated port: 5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 612.185299] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Updating instance_info_cache with network_info: [{"id": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "address": "fa:16:3e:7a:b1:4e", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951987e2-f8", "ovs_interfaceid": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.291456] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e865f7-c6b6-4c96-9184-b86d26eb0777 tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.895s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
612.303420] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Successfully created port: e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.322431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.322665] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.443307] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590036} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.444546] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.444546] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.444546] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3f06a5d-bc54-48ca-a29d-f5f116412ac3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.456422] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 612.456422] env[68638]: value = "task-2833062" [ 612.456422] env[68638]: _type = "Task" [ 612.456422] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.467026] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.480267] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833061, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.513664] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1578874c-c088-4372-8a09-9788ab36f4fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.521373] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482e3dac-9b0e-4edf-9e97-c633d70b27e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.554365] env[68638]: DEBUG nova.network.neutron [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.556436] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b1bab2-e767-48ab-8192-201283bcb7fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.563974] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e14ce68-5162-4784-b375-057725073415 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.581908] env[68638]: DEBUG nova.compute.provider_tree [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.685791] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.685997] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.686162] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 
8f841b29-0156-414e-8467-c9a9393cdae9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.687499] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.687960] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Instance network_info: |[{"id": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "address": "fa:16:3e:7a:b1:4e", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951987e2-f8", "ovs_interfaceid": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 612.688210] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:b1:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '951987e2-f8ec-4ab6-a168-7db5fd4bb37c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.698850] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating folder: Project (dce3b1af81bc4cec877ef5a7e6999a7f). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.699442] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46c521a7-cc8b-48c9-85c1-7a2bb308a29b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.709997] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created folder: Project (dce3b1af81bc4cec877ef5a7e6999a7f) in parent group-v569734. [ 612.710209] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating folder: Instances. Parent ref: group-v569775. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 612.710684] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-979652c7-554b-47c4-afcc-d981f8eae7e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.722923] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created folder: Instances in parent group-v569775. [ 612.723206] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 612.723403] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 612.725235] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0cd1cf8-8b72-47eb-ac64-9cecbde6f68c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.747226] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.747226] env[68638]: value = "task-2833065" [ 612.747226] env[68638]: _type = "Task" [ 612.747226] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.763308] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833065, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.795465] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 612.886634] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.919633] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 612.919972] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.920194] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 612.920456] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.920613] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 612.920768] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 612.921007] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 612.921583] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 612.921583] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 612.921583] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 612.921802] env[68638]: DEBUG nova.virt.hardware [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 612.923115] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36909fd-c7bf-4998-b2fb-7af9c24e93a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.934754] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bc3633-92cf-4c54-ab4b-925cc2acc5cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.962100] env[68638]: DEBUG nova.compute.manager [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Received event network-vif-plugged-951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 612.962253] env[68638]: DEBUG oslo_concurrency.lockutils [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] Acquiring lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.962460] env[68638]: DEBUG oslo_concurrency.lockutils [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.962625] env[68638]: DEBUG oslo_concurrency.lockutils [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.962836] env[68638]: DEBUG 
nova.compute.manager [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] No waiting events found dispatching network-vif-plugged-951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 612.963012] env[68638]: WARNING nova.compute.manager [req-73de7ad0-56b2-44ef-9b1c-880c0c0d4b63 req-e391939f-1865-44ed-85f9-a6d4c7dd3068 service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Received unexpected event network-vif-plugged-951987e2-f8ec-4ab6-a168-7db5fd4bb37c for instance with vm_state building and task_state spawning. [ 612.971903] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.17802} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.973600] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.974654] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676b570d-6f9c-4a4f-8bec-3b3a90e8852a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.979318] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updated VIF entry in instance network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 612.979318] env[68638]: DEBUG nova.network.neutron [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updating instance_info_cache with network_info: [{"id": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "address": "fa:16:3e:27:30:e5", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c2852d-02", "ovs_interfaceid": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.993490] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833061, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.004319] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.005101] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b72ea98f-4eab-490c-8453-3edb8ef19d60 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.029020] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 613.029020] env[68638]: value = "task-2833066" [ 613.029020] env[68638]: _type = "Task" [ 613.029020] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.036902] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833066, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.061077] env[68638]: INFO nova.compute.manager [-] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Took 1.91 seconds to deallocate network for instance. [ 613.085440] env[68638]: DEBUG nova.scheduler.client.report [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.258750] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833065, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.271428] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.288120] env[68638]: DEBUG nova.compute.manager [req-2ec6bb26-1098-4672-9564-470f21bb1dae req-79e55017-a2df-4ffa-8671-6e46fd644133 service nova] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Received event network-vif-deleted-bcca1912-c5f3-4a93-a224-b3707bd3c2b0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 613.318903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.434217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "a5e993de-7aad-4b34-8946-563dc69a6f25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.434463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.434607] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.435089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.435089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.437229] env[68638]: INFO nova.compute.manager [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Terminating instance [ 613.471602] env[68638]: DEBUG nova.compute.manager [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 
tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.472192] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6686a2d-671d-40a6-b338-3766a1552183 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.481326] env[68638]: DEBUG oslo_concurrency.lockutils [req-c50d0f09-bde3-48c3-8fc8-d066e881f936 req-ca06571b-c901-4060-a92c-0865a718ec68 service nova] Releasing lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.487474] env[68638]: DEBUG oslo_vmware.api [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833061, 'name': PowerOnVM_Task, 'duration_secs': 1.147162} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.491011] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.491011] env[68638]: INFO nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Took 10.61 seconds to spawn the instance on the hypervisor. [ 613.491803] env[68638]: DEBUG nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.499335] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8007cf-bb8b-4163-80f7-7847b6b084f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.539388] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833066, 'name': ReconfigVM_Task, 'duration_secs': 0.450219} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.540413] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfigured VM instance instance-0000000a to attach disk [datastore2] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.541225] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c04495e3-0f38-4f21-a393-01959e27bcc7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.543984] env[68638]: DEBUG nova.network.neutron [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Updating instance_info_cache with network_info: [{"id": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "address": "fa:16:3e:14:60:7d", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ee0473a-5e", "ovs_interfaceid": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.550289] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 613.550289] env[68638]: value = "task-2833067" [ 613.550289] env[68638]: _type = "Task" [ 613.550289] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.561116] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833067, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.566869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.591101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.720s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.591267] env[68638]: INFO nova.compute.manager [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Migrating [ 613.591561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.591771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.593298] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.066s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.595279] env[68638]: INFO nova.compute.claims [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.759293] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833065, 'name': CreateVM_Task, 'duration_secs': 0.760727} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.759783] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 613.760575] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.760752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.761087] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 613.761356] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8a26b9a-d472-4d01-8cb5-39deecf702b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.765996] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 613.765996] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527c3dff-d831-4082-4cdb-943dc86c992b" [ 613.765996] env[68638]: _type = "Task" [ 613.765996] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.775032] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c3dff-d831-4082-4cdb-943dc86c992b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.819186] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "7b0b6eec-4681-4926-ad3f-5572e022a467" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.819521] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.941654] env[68638]: DEBUG nova.compute.manager [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.941892] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.943360] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8553a28f-289e-48f4-83cf-4bd702bef713 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.951439] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.951736] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-251961f0-ddb8-4ff7-ba23-3972f2dbc5b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.958742] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 613.958742] env[68638]: value = "task-2833068" [ 613.958742] env[68638]: _type = "Task" [ 613.958742] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.967820] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833068, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.007033] env[68638]: INFO nova.compute.manager [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] instance snapshotting [ 614.009638] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d83c5d-3ac6-4845-84cb-373f0b2914d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.038768] env[68638]: INFO nova.compute.manager [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Took 27.25 seconds to build instance. [ 614.040539] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a034af-bdb8-4bad-8f03-7b32645b79ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.048395] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.049044] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Instance network_info: |[{"id": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "address": "fa:16:3e:14:60:7d", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ee0473a-5e", "ovs_interfaceid": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 614.051778] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:60:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ee0473a-5eb7-4b42-b970-cb92565f8dd5', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.060659] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.061267] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 614.065429] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bc3681d-4d5d-4035-ac02-50a646827c8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.085737] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833067, 'name': Rename_Task, 'duration_secs': 0.149377} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.087204] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.087672] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.087672] env[68638]: value = "task-2833069" [ 614.087672] env[68638]: _type = "Task" [ 614.087672] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.088314] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2632f644-4ee3-44d8-a8a1-cebd1aeb946b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.099354] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833069, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.101193] env[68638]: INFO nova.compute.rpcapi [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 614.101764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.110670] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 614.110670] env[68638]: value = "task-2833070" [ 614.110670] env[68638]: _type = "Task" [ 614.110670] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.131301] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833070, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.251760] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Successfully updated port: e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.278698] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c3dff-d831-4082-4cdb-943dc86c992b, 'name': SearchDatastore_Task, 'duration_secs': 0.010376} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.279022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.279255] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.279624] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.279773] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.279951] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 614.280605] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b7c2f4d-19ec-48b1-ae96-309f4462e37a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.290363] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 614.290556] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 614.291303] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52febcd0-2c92-4448-9624-41ee9efc3508 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.298126] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.298126] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528d554e-e827-89ea-0719-7c2e775e10cc" [ 614.298126] env[68638]: _type = "Task" [ 614.298126] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.306690] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d554e-e827-89ea-0719-7c2e775e10cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.471454] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.544638] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f78a181-9f83-4b1e-bedf-7f4f00386501 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.772s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.562964] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 614.563336] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-360957bc-b890-4175-acbc-a273b26dd9a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.571156] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 614.571156] env[68638]: value = "task-2833071" [ 614.571156] env[68638]: _type = "Task" [ 614.571156] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.584330] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833071, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.600380] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833069, 'name': CreateVM_Task, 'duration_secs': 0.489193} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.600380] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.605567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.605567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.605567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 614.605567] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc40daac-ead6-4b19-b264-681de880054b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.609854] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.609854] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52006fd6-50b3-d361-5f9b-a1c5dc2035d5" [ 614.609854] env[68638]: _type = "Task" [ 614.609854] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.624297] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52006fd6-50b3-d361-5f9b-a1c5dc2035d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010196} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.625054] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.627637] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.627637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.627637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.627637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.627637] env[68638]: DEBUG nova.network.neutron [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.631744] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833070, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.756844] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.756992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquired lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.758362] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 614.789161] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "5a28d684-584b-4e13-9910-183119ce5d37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.789435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.814223] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d554e-e827-89ea-0719-7c2e775e10cc, 'name': SearchDatastore_Task, 'duration_secs': 0.008941} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.815078] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65bc258b-170e-49f1-ab66-ce584ac918e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.822852] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.822852] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52506dc6-32c0-6e0a-a1e1-2b4ca9659628" [ 614.822852] env[68638]: _type = "Task" [ 614.822852] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.835620] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52506dc6-32c0-6e0a-a1e1-2b4ca9659628, 'name': SearchDatastore_Task, 'duration_secs': 0.009353} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.835620] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.835930] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] f767af17-f2bb-461d-9e7f-9c62b5504257/f767af17-f2bb-461d-9e7f-9c62b5504257.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.835930] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.836454] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 614.836454] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2022c945-d05b-4bed-bcef-0c604c2e5406 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.838735] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99f7987b-5f91-4a66-8a34-5014462f653b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.853331] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 614.853572] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 614.854508] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.854508] env[68638]: value = "task-2833072" [ 614.854508] env[68638]: _type = "Task" [ 614.854508] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.854658] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f549110f-ad9d-4c61-bcb1-c1b0ca2728cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.864039] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.864039] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e41bd4-5223-328e-9870-1dad7cc86d42" [ 614.864039] env[68638]: _type = "Task" [ 614.864039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.869895] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833072, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.881542] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e41bd4-5223-328e-9870-1dad7cc86d42, 'name': SearchDatastore_Task, 'duration_secs': 0.008767} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.881678] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b58a57b-f8b1-42fa-a066-eaba00a85c59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.887091] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.887091] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52afda43-460f-04ec-8cf4-821fe234a602" [ 614.887091] env[68638]: _type = "Task" [ 614.887091] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.902512] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52afda43-460f-04ec-8cf4-821fe234a602, 'name': SearchDatastore_Task, 'duration_secs': 0.00872} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.902701] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.902960] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8f841b29-0156-414e-8467-c9a9393cdae9/8f841b29-0156-414e-8467-c9a9393cdae9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.903649] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bea3a761-12e5-4779-9e53-6d6db07871ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.918680] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 614.918680] env[68638]: value = "task-2833073" [ 614.918680] env[68638]: _type = "Task" [ 614.918680] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.931802] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.977035] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833068, 'name': PowerOffVM_Task, 'duration_secs': 0.7185} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.977035] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.977035] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.977035] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03d86208-f571-4a52-a963-5d62690b2b66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.046694] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 615.046924] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 615.047117] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Deleting the datastore file [datastore1] a5e993de-7aad-4b34-8946-563dc69a6f25 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 615.047671] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24cd2a83-7e21-48de-8be9-49634a05695c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.050037] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 615.064928] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for the task: (returnval){ [ 615.064928] env[68638]: value = "task-2833075" [ 615.064928] env[68638]: _type = "Task" [ 615.064928] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.078358] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.101115] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833071, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.130179] env[68638]: DEBUG oslo_vmware.api [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833070, 'name': PowerOnVM_Task, 'duration_secs': 0.603924} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.135038] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.135281] env[68638]: DEBUG nova.compute.manager [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.142517] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6b6818-98f3-474d-80ec-6aae253f7acd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.368118] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833072, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509238} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.371183] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] f767af17-f2bb-461d-9e7f-9c62b5504257/f767af17-f2bb-461d-9e7f-9c62b5504257.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.371874] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.372729] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12b6ca6-ab80-484f-9784-b33d5db55c25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.376781] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b320d92-fbd7-41d6-8d63-272cac44c639 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.386714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93efa61-ad20-47a3-8684-4ea8855c1e69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.392107] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 615.392107] env[68638]: value = "task-2833076" [ 615.392107] env[68638]: _type = "Task" [ 615.392107] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.440046] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6549855d-12e3-4992-87d6-2d9214ca7fc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.444191] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.450236] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833073, 'name': CopyVirtualDisk_Task} progress is 4%. 
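The CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task sequence above is the cached-image spawn path: the image disk is copied out of devstack-image-cache_base, grown to the requested root size, then attached to the new VM. A rough sketch of that ordering; copy_virtual_disk, extend_virtual_disk and attach_disk_to_vm are hypothetical print-only stand-ins for the task invocations, each of which would be awaited with the polling loop sketched earlier:

    # Hypothetical print-only stand-ins for the vSphere task invocations above.
    def copy_virtual_disk(source, dest):
        print(f"CopyVirtualDisk_Task: {source} -> {dest}")

    def extend_virtual_disk(path, new_size):
        print(f"ExtendVirtualDisk_Task: {path} to {new_size}")

    def attach_disk_to_vm(instance_uuid, path, disk_type):
        print(f"ReconfigVM_Task: attach {path} ({disk_type}) to instance {instance_uuid}")

    def prepare_root_disk(instance_uuid, cache_vmdk, datastore, root_size=1048576):
        """Cached-image spawn ordering seen above: copy, extend, attach."""
        instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
        copy_virtual_disk(cache_vmdk, instance_vmdk)     # out of devstack-image-cache_base
        extend_virtual_disk(instance_vmdk, root_size)    # grow to the requested root size
        attach_disk_to_vm(instance_uuid, instance_vmdk, "sparse")
        return instance_vmdk

    prepare_root_disk(
        "f767af17-f2bb-461d-9e7f-9c62b5504257",
        "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/"
        "ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk",
        "datastore2",
    )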
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.453916] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ef62f5-7c8d-4275-95a2-ae8b48b4e2d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.470374] env[68638]: DEBUG nova.compute.provider_tree [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.575501] env[68638]: DEBUG oslo_vmware.api [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Task: {'id': task-2833075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323085} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.575914] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.576119] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 615.576371] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.576573] env[68638]: INFO nova.compute.manager [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Took 1.63 seconds to destroy the instance on the hypervisor. [ 615.576942] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
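The oslo.service loopingcall entry above ("Waiting for function ... _deallocate_network_with_retries to return") wraps network deallocation in a helper that is retried until it succeeds. A stdlib imitation of that retry-until-success pattern; the attempt count, interval and deallocate_network stub are illustrative assumptions, not Nova's actual values:

    import time

    def call_with_retries(func, attempts=3, interval=2.0):
        """Call func until it succeeds, sleeping between failed attempts."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:          # real code catches narrower exceptions
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc!r}); retrying in {interval}s")
                time.sleep(interval)

    def deallocate_network():                 # hypothetical stand-in
        print("deallocate_for_instance() ...")

    call_with_retries(deallocate_network)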
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.580428] env[68638]: DEBUG nova.compute.manager [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 615.580428] env[68638]: DEBUG nova.network.neutron [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.584799] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.588906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.593075] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833071, 'name': CreateSnapshot_Task, 'duration_secs': 0.856695} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.593400] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 615.596026] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f367312-afc7-4d85-851a-c9476b403412 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.607545] env[68638]: DEBUG nova.network.neutron [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": 
"nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.669162] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.903548] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093678} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.903862] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.905269] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda49ef0-90d2-498e-ac25-c9d83722594e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.931945] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] f767af17-f2bb-461d-9e7f-9c62b5504257/f767af17-f2bb-461d-9e7f-9c62b5504257.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.932722] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-279c1376-2033-4196-bc55-c39fdfff5781 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.957177] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766653} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.958392] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8f841b29-0156-414e-8467-c9a9393cdae9/8f841b29-0156-414e-8467-c9a9393cdae9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.958623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.958934] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 615.958934] env[68638]: value = "task-2833077" [ 615.958934] env[68638]: _type = "Task" [ 615.958934] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.959327] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e789b90-5773-48ae-897c-729b2943369f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.966873] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Received event network-changed-951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 615.967060] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Refreshing instance network info cache due to event network-changed-951987e2-f8ec-4ab6-a168-7db5fd4bb37c. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 615.967273] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquiring lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.967416] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquired lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.967574] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Refreshing network info cache for port 951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 615.972628] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 615.972628] env[68638]: value = "task-2833078" [ 615.972628] env[68638]: _type = "Task" [ 615.972628] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.973821] env[68638]: DEBUG nova.scheduler.client.report [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 615.988448] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.994712] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833078, 'name': ExtendVirtualDisk_Task} progress is 0%. 
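The inventory payload reported for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff above is what Placement schedules against; the effective capacity of each resource class works out to (total - reserved) * allocation_ratio. A quick check against the values in the log entry above:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{resource_class}: schedulable capacity {capacity}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So this node exposes 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk to the scheduler.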
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.076654] env[68638]: DEBUG nova.network.neutron [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Updating instance_info_cache with network_info: [{"id": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "address": "fa:16:3e:d6:53:a1", "network": {"id": "f815fcac-f046-4a1f-8fcc-7ef83f914185", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-489253153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2c09879d5e1431caea12a0ae614a0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape545c157-e0", "ovs_interfaceid": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.114015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 616.114980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.116344] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4fa7421f-4b65-4cb2-9499-b028f4d93679 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.126366] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 616.126366] env[68638]: value = "task-2833079" [ 616.126366] env[68638]: _type = "Task" [ 616.126366] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.135906] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833079, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.138218] env[68638]: DEBUG nova.compute.manager [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Received event network-vif-plugged-e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 616.138374] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Acquiring lock "a5dedd3e-a544-4005-bc9b-0735267d6753-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.138582] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.138747] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.138970] env[68638]: DEBUG nova.compute.manager [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] No waiting events found dispatching network-vif-plugged-e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 616.139112] env[68638]: WARNING nova.compute.manager [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Received unexpected event network-vif-plugged-e545c157-e03b-41b1-a90a-4519cddbdfaa for instance with vm_state building and task_state spawning. [ 616.139234] env[68638]: DEBUG nova.compute.manager [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Received event network-changed-e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 616.139410] env[68638]: DEBUG nova.compute.manager [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Refreshing instance network info cache due to event network-changed-e545c157-e03b-41b1-a90a-4519cddbdfaa. 
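The "_pop_event", "No waiting events found dispatching ..." and "Received unexpected event ..." entries above are the compute manager matching Neutron's external events against waiters registered by the thread that is spawning the instance. A simplified sketch of that dispatch using threading.Event; the class below is a reduction for illustration, keyed only on instance UUID and event name, and is not Nova's InstanceEvents implementation:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Reduced waiter registry: spawning threads wait, the event handler pops."""

        def __init__(self):
            self._guard = threading.Lock()
            self._waiters = defaultdict(dict)    # instance uuid -> {event name: Event}

        def prepare_for_event(self, instance_uuid, event_name):
            waiter = threading.Event()
            with self._guard:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            with self._guard:
                waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
            if waiter is None:
                print(f"WARNING: received unexpected event {event_name} "
                      f"for instance {instance_uuid}")
            else:
                waiter.set()

    events = InstanceEvents()
    w = events.prepare_for_event("a5dedd3e", "network-vif-plugged-e545c157")
    events.pop_event("a5dedd3e", "network-vif-plugged-e545c157")   # dispatched to the waiter
    events.pop_event("a5dedd3e", "network-changed-e545c157")       # no waiter -> warning
    assert w.is_set()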
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 616.139579] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Acquiring lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.477584] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833077, 'name': ReconfigVM_Task, 'duration_secs': 0.279325} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.481424] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Reconfigured VM instance instance-0000000e to attach disk [datastore2] f767af17-f2bb-461d-9e7f-9c62b5504257/f767af17-f2bb-461d-9e7f-9c62b5504257.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.482476] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5459a948-3476-4620-bc91-0744f1abb4e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.484974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.892s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.485596] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 616.491278] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.590s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.492786] env[68638]: INFO nova.compute.claims [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.496436] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076889} completed successfully. 
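The lockutils entries above ("acquired ... waited 13.590s", "released ... held 2.892s") show every resource claim serializing on the named "compute_resources" lock, with wait and hold times logged. The real helper lives in oslo.concurrency; the context manager below only imitates that timing instrumentation with the standard library:

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name):
        """Acquire a named lock, reporting how long we waited for it and held it."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            print(f'Lock "{name}" acquired :: waited {time.monotonic() - t0:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                print(f'Lock "{name}" released :: held {time.monotonic() - t1:.3f}s')

    with timed_lock("compute_resources"):
        pass   # e.g. run the resource tracker's instance claim here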
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.497159] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.498498] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f249af85-2b30-4c90-9239-9389b09bc084 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.503433] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 616.503433] env[68638]: value = "task-2833080" [ 616.503433] env[68638]: _type = "Task" [ 616.503433] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.528649] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 8f841b29-0156-414e-8467-c9a9393cdae9/8f841b29-0156-414e-8467-c9a9393cdae9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.529652] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf194291-25ea-4bce-ac83-b0d4e3eaea87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.551506] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833080, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.556685] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 616.556685] env[68638]: value = "task-2833081" [ 616.556685] env[68638]: _type = "Task" [ 616.556685] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.567161] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.579331] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Releasing lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.579670] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Instance network_info: |[{"id": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "address": "fa:16:3e:d6:53:a1", "network": {"id": "f815fcac-f046-4a1f-8fcc-7ef83f914185", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-489253153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2c09879d5e1431caea12a0ae614a0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape545c157-e0", "ovs_interfaceid": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 616.579981] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Acquired lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.580323] env[68638]: DEBUG nova.network.neutron [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Refreshing network info cache for port e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.585146] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:53:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7894814c-6be3-4b80-a08e-4a771bc05dd1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e545c157-e03b-41b1-a90a-4519cddbdfaa', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.590832] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 
tempest-ImagesOneServerTestJSON-1072539284-project-member] Creating folder: Project (f2c09879d5e1431caea12a0ae614a0a9). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.591918] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8e5d5dc-06a7-49de-b817-8f8f9ee7490b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.603946] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Created folder: Project (f2c09879d5e1431caea12a0ae614a0a9) in parent group-v569734. [ 616.604168] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Creating folder: Instances. Parent ref: group-v569781. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.604413] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-223ebbbc-1133-4cab-8952-a169ee856442 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.616793] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Created folder: Instances in parent group-v569781. [ 616.617058] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.617268] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.617479] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7752538-63ab-4ea8-b97f-bc92c513b0f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.653040] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833079, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.653640] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.653640] env[68638]: value = "task-2833084" [ 616.653640] env[68638]: _type = "Task" [ 616.653640] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.663666] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833084, 'name': CreateVM_Task} progress is 0%. 
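The two Folder.CreateFolder invocations above build the per-tenant inventory layout: a "Project (<project id>)" folder under the OpenStack parent folder, then an "Instances" folder inside it, after which CreateVM_Task places the new VM there. A toy sketch of that idempotent nesting over an in-memory tree (purely illustrative; the real calls target vSphere Folder managed objects):

    def ensure_folder(tree, parent, name):
        """Create tree entry parent/name if missing and return its path."""
        path = f"{parent}/{name}"
        if path not in tree:
            tree[path] = {"parent": parent}
            print(f"Created folder: {name} in parent {parent}.")
        else:
            print(f"Folder {name} already exists in {parent}.")
        return path

    folders = {}
    project = ensure_folder(folders, "group-v569734",
                            "Project (f2c09879d5e1431caea12a0ae614a0a9)")
    instances = ensure_folder(folders, project, "Instances")
    # CreateVM_Task then places the new VM inside `instances`.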
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.718661] env[68638]: DEBUG nova.network.neutron [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.000600] env[68638]: DEBUG nova.compute.utils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 617.004719] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 617.004719] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.020074] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833080, 'name': Rename_Task, 'duration_secs': 0.149585} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.022314] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.022314] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d184e355-e400-45f9-9a78-f812fcd02fe0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.030316] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 617.030316] env[68638]: value = "task-2833085" [ 617.030316] env[68638]: _type = "Task" [ 617.030316] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.039952] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833085, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.049043] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Updated VIF entry in instance network info cache for port 951987e2-f8ec-4ab6-a168-7db5fd4bb37c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 617.049495] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Updating instance_info_cache with network_info: [{"id": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "address": "fa:16:3e:7a:b1:4e", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951987e2-f8", "ovs_interfaceid": "951987e2-f8ec-4ab6-a168-7db5fd4bb37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.068996] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.102916] env[68638]: DEBUG nova.policy [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7e9f64ef5ef4f2c9d8100ed55e7cbc8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '373459ee626847e9886e5ff353729280', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 617.159695] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833079, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.168535] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833084, 'name': CreateVM_Task, 'duration_secs': 0.356867} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.168730] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.169454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.169641] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.169970] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.171030] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2727cd4-f12b-4149-82b0-6a70f9e3d31e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.175929] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 617.175929] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bab0c7-0bf6-ee30-e44b-fe37b0a87600" [ 617.175929] env[68638]: _type = "Task" [ 617.175929] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.185545] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bab0c7-0bf6-ee30-e44b-fe37b0a87600, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.223220] env[68638]: INFO nova.compute.manager [-] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Took 1.64 seconds to deallocate network for instance. [ 617.464571] env[68638]: INFO nova.compute.manager [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Rebuilding instance [ 617.505410] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 617.528156] env[68638]: DEBUG nova.compute.manager [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 617.528156] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4a8101-5ff7-4be8-9a2b-42109ba6cd53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.551473] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833085, 'name': PowerOnVM_Task, 'duration_secs': 0.469499} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.555050] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Releasing lock "refresh_cache-f767af17-f2bb-461d-9e7f-9c62b5504257" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.555050] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Received event network-vif-plugged-5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 617.555050] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquiring lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.555050] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.555050] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.555431] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] No waiting events found dispatching network-vif-plugged-5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 617.555431] env[68638]: WARNING nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] 
Received unexpected event network-vif-plugged-5ee0473a-5eb7-4b42-b970-cb92565f8dd5 for instance with vm_state building and task_state spawning. [ 617.555431] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Received event network-changed-5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 617.555431] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Refreshing instance network info cache due to event network-changed-5ee0473a-5eb7-4b42-b970-cb92565f8dd5. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 617.555431] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquiring lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.555630] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquired lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.555630] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Refreshing network info cache for port 5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 617.556686] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 617.556686] env[68638]: INFO nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Took 9.98 seconds to spawn the instance on the hypervisor. [ 617.556686] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 617.557796] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18bd4dd-353c-45c0-a351-424dd57e230e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.578129] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833081, 'name': ReconfigVM_Task, 'duration_secs': 0.775629} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.578859] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 8f841b29-0156-414e-8467-c9a9393cdae9/8f841b29-0156-414e-8467-c9a9393cdae9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.579527] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26c63958-5a62-49a1-9747-5e2b01b3a418 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.587424] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 617.587424] env[68638]: value = "task-2833086" [ 617.587424] env[68638]: _type = "Task" [ 617.587424] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.606672] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833086, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.656466] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833079, 'name': CloneVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.659567] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c848e2-1292-43c8-8335-7f1134b3cd13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.682993] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 617.703389] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bab0c7-0bf6-ee30-e44b-fe37b0a87600, 'name': SearchDatastore_Task, 'duration_secs': 0.011055} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.703707] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.704019] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.704240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.704617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.704617] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.705197] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c93cb489-c3a6-4caf-8c7f-8e0d68785fdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.715324] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.715860] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 617.716655] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8357a60-39ea-4438-8be3-95b4ed73d8d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.727199] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 617.727199] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52aea935-e41a-0bdc-c748-bd6c99ae0e63" [ 617.727199] env[68638]: _type = "Task" [ 617.727199] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.732050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.737752] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aea935-e41a-0bdc-c748-bd6c99ae0e63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.027061] env[68638]: DEBUG nova.network.neutron [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Updated VIF entry in instance network info cache for port e545c157-e03b-41b1-a90a-4519cddbdfaa. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 618.027328] env[68638]: DEBUG nova.network.neutron [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Updating instance_info_cache with network_info: [{"id": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "address": "fa:16:3e:d6:53:a1", "network": {"id": "f815fcac-f046-4a1f-8fcc-7ef83f914185", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-489253153-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2c09879d5e1431caea12a0ae614a0a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7894814c-6be3-4b80-a08e-4a771bc05dd1", "external-id": "nsx-vlan-transportzone-948", "segmentation_id": 948, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape545c157-e0", "ovs_interfaceid": "e545c157-e03b-41b1-a90a-4519cddbdfaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.092224] env[68638]: INFO nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Took 24.72 seconds to build instance. [ 618.098959] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833086, 'name': Rename_Task, 'duration_secs': 0.173602} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.099254] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 618.099509] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4d3fddc-4fc6-4ab8-a6df-9276032b9818 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.107200] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 618.107200] env[68638]: value = "task-2833087" [ 618.107200] env[68638]: _type = "Task" [ 618.107200] env[68638]: } to complete. 
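The cache entry above is a plain list of VIF dictionaries, so the fields that usually matter when debugging (port ID, MAC, fixed IPs, NSX segmentation ID) can be read straight out of it. A small self-contained example over the same structure, with the values abbreviated:

```python
# Pull the commonly needed fields out of a cached network_info entry shaped
# like the one logged above (values shortened for brevity).
network_info = [{
    "id": "e545c157-e03b-41b1-a90a-4519cddbdfaa",
    "address": "fa:16:3e:d6:53:a1",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.14",
                                      "type": "fixed"}]}]},
    "details": {"segmentation_id": 948},
    "active": True,
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], fixed_ips,
          vif["details"].get("segmentation_id"))
```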
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.123883] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833087, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.151756] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833079, 'name': CloneVM_Task, 'duration_secs': 1.552319} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.152362] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Created linked-clone VM from snapshot [ 618.153193] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1144d571-babf-4ff3-bd58-f0d1697bee40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.161499] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Uploading image 4eb37f28-a930-45b2-9458-d6e77436b6ef {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 618.180143] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 618.180143] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cdde14ee-1333-4152-b7ea-e37dc586085c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.183246] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dc6414-1b3a-4bcd-b323-3c557cc5ff2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.194029] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bde242-c997-418a-b2c7-519a7a5f60cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.195525] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 618.195525] env[68638]: value = "task-2833088" [ 618.195525] env[68638]: _type = "Task" [ 618.195525] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.227812] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 618.230753] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-342e09d8-d21d-4100-b541-d515e20e8b46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.233253] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b88552-cfa0-4067-9bd5-268e12203412 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.239215] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Successfully created port: c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.244217] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833088, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.252851] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915b8f03-f6c3-405b-97fe-2060db1aa88d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.257228] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aea935-e41a-0bdc-c748-bd6c99ae0e63, 'name': SearchDatastore_Task, 'duration_secs': 0.01047} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.259589] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 618.259589] env[68638]: value = "task-2833089" [ 618.259589] env[68638]: _type = "Task" [ 618.259589] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.260598] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b99c5650-bef3-426e-ba8e-1c745ad8677d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.274070] env[68638]: DEBUG nova.compute.provider_tree [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.282257] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.283701] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 618.283701] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cc51bc-f2d3-7390-e853-99051be1eedb" [ 618.283701] env[68638]: _type = "Task" [ 618.283701] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.294877] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cc51bc-f2d3-7390-e853-99051be1eedb, 'name': SearchDatastore_Task, 'duration_secs': 0.009988} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.295143] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.295396] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a5dedd3e-a544-4005-bc9b-0735267d6753/a5dedd3e-a544-4005-bc9b-0735267d6753.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 618.295645] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c3fdc51-a2f1-4827-9bf2-bce91f70258f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.301330] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 618.301330] env[68638]: value = "task-2833090" [ 618.301330] env[68638]: _type = "Task" [ 618.301330] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.309259] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.523027] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Start spawning the instance on the hypervisor. 
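The records above are the image-cache hit path for instance a5dedd3e: SearchDatastore_Task locates the cached base VMDK under devstack-image-cache_base, and CopyVirtualDisk_Task copies it into the instance directory before it is extended and attached. A condensed sketch of the copy step using the same invoke-then-wait pattern as earlier; disk_mgr and dc_ref are placeholders for the VirtualDiskManager handle and datacenter reference the driver looks up elsewhere:

```python
# Sketch of the "Copying Virtual Disk ..." step above. disk_mgr and dc_ref are
# placeholder managed-object references; paths follow the layout in the log.
def copy_cached_image(session, disk_mgr, dc_ref, image_id, instance_uuid,
                      datastore='datastore1'):
    source = ('[%s] devstack-image-cache_base/%s/%s.vmdk'
              % (datastore, image_id, image_id))
    dest = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=source, sourceDatacenter=dc_ref,
                              destName=dest, destDatacenter=dc_ref)
    session.wait_for_task(task)
```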
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 618.532607] env[68638]: DEBUG oslo_concurrency.lockutils [req-f206628a-19b6-4c80-a3e9-87b6432454ec req-80eb5556-f7ea-42de-b675-8723d9280410 service nova] Releasing lock "refresh_cache-a5dedd3e-a544-4005-bc9b-0735267d6753" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.554302] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.559027] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.559027] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.559027] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.559027] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.559027] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.559587] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.559587] 
env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.559587] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.559587] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.559587] env[68638]: DEBUG nova.virt.hardware [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.559838] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 618.561559] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8158a967-6039-4b64-a6c3-137bc6b412ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.567581] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Updated VIF entry in instance network info cache for port 5ee0473a-5eb7-4b42-b970-cb92565f8dd5. 
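The nova.virt.hardware lines above enumerate every (sockets, cores, threads) split of the flavor's vCPU count that fits the configured limits, then sort the candidates by preference; for the 1-vCPU m1.nano flavor with 65536/65536/65536 limits the only candidate is 1:1:1, which is exactly what is logged. An illustrative enumeration of the same idea (not Nova's actual code, which also folds in flavor and image preferences):

```python
# Enumerate (sockets, cores, threads) triples whose product equals the vCPU
# count, capped per dimension. For vcpus=1 this yields only (1, 1, 1).
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)]
```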
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 618.568635] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Updating instance_info_cache with network_info: [{"id": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "address": "fa:16:3e:14:60:7d", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ee0473a-5e", "ovs_interfaceid": "5ee0473a-5eb7-4b42-b970-cb92565f8dd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.570262] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0dff21e9-9160-4ab9-bbfd-f17b0df12e9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.586699] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fccdc25-20e3-42ae-b302-9eae4e038ede {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.591413] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 618.591413] env[68638]: value = "task-2833091" [ 618.591413] env[68638]: _type = "Task" [ 618.591413] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.613415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.253s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.624083] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.627361] env[68638]: DEBUG nova.compute.manager [req-a572a327-95ca-4765-84ec-99def2d22e5f req-4eeaa2d4-1e44-4285-a5a3-7333082fa966 service nova] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Received event network-vif-deleted-f3550783-066c-4341-b12e-157c8000cb63 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 618.632537] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833087, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.707556] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833088, 'name': Destroy_Task, 'duration_secs': 0.470013} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.707556] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Destroyed the VM [ 618.708032] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 618.708171] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d613588-84a0-4cc6-8c02-6609c2eed09e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.715728] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 618.715728] env[68638]: value = "task-2833092" [ 618.715728] env[68638]: _type = "Task" [ 618.715728] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.729573] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833092, 'name': RemoveSnapshot_Task} progress is 0%. 
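The linked-clone snapshot sequence for instance 6cb1846a wraps up here: CloneVM_Task produced a throwaway linked-clone VM, image 4eb37f28 is uploaded from it, and the clone is then destroyed and the source VM's snapshot removed. A rough sketch of that sequence with oslo.vmware; the clone spec and the stream-optimized upload are out of scope and passed in as placeholders:

```python
# Rough sketch of the clone -> upload -> destroy -> remove-snapshot sequence
# above. clone_spec and upload_cb are placeholders for the real spec builder
# and Glance upload helper; error handling and export leases are omitted.
def snapshot_via_linked_clone(session, vm_ref, folder_ref, snapshot_ref,
                              clone_spec, clone_name, upload_cb):
    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=clone_name,
                              spec=clone_spec)
    clone_ref = session.wait_for_task(task).result   # the linked-clone VM

    upload_cb(clone_ref)                              # "Uploading image ..."

    session.wait_for_task(                            # "Destroyed the VM"
        session.invoke_api(session.vim, 'Destroy_Task', clone_ref))
    session.wait_for_task(                            # "Deleting Snapshot ..."
        session.invoke_api(session.vim, 'RemoveSnapshot_Task', snapshot_ref,
                           removeChildren=False))
```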
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.771294] env[68638]: DEBUG nova.compute.manager [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Received event network-changed-173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 618.771806] env[68638]: DEBUG nova.compute.manager [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Refreshing instance network info cache due to event network-changed-173c13c1-a5ec-4a6b-98d8-e039626a047e. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 618.771967] env[68638]: DEBUG oslo_concurrency.lockutils [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] Acquiring lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.772070] env[68638]: DEBUG oslo_concurrency.lockutils [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] Acquired lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.772274] env[68638]: DEBUG nova.network.neutron [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Refreshing network info cache for port 173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.778793] env[68638]: DEBUG nova.scheduler.client.report [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 618.784716] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833089, 'name': PowerOffVM_Task, 'duration_secs': 0.213604} completed successfully. 
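The report-client record above is the inventory nova-compute keeps in Placement for provider a03d7c1f; the schedulable capacity of each resource class follows the usual Placement convention of (total - reserved) * allocation_ratio, so the values logged here come out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check with the numbers exactly as logged:

```python
# Capacity check for the inventory logged above:
# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```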
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.784716] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 618.784716] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 618.817258] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833090, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.074333] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Releasing lock "refresh_cache-8f841b29-0156-414e-8467-c9a9393cdae9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.074624] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Received event network-changed-93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 619.074847] env[68638]: DEBUG nova.compute.manager [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Refreshing instance network info cache due to event network-changed-93a70134-a907-41d4-bce1-2bf7496b23bb. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 619.075135] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquiring lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.075439] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Acquired lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.075439] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Refreshing network info cache for port 93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 619.105688] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833091, 'name': PowerOffVM_Task, 'duration_secs': 0.281088} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.106273] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.106273] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.107399] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa63f441-de14-4f84-9a09-eac319cdc2dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.115071] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.119492] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5792e738-aa9a-4daf-b1e6-ca6407df1351 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.127193] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 619.128577] env[68638]: DEBUG oslo_vmware.api [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833087, 'name': PowerOnVM_Task, 'duration_secs': 0.666968} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.130023] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.130475] env[68638]: INFO nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Took 8.93 seconds to spawn the instance on the hypervisor. [ 619.130475] env[68638]: DEBUG nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.131264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd907d6-539a-4f23-a8c3-550d33d61ed7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.149181] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.149414] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.149748] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Deleting the datastore file [datastore2] c71693e9-aeaa-4f12-b5cf-a179e558505d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.150235] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26d68a0c-5f4c-45fb-954c-0d8e74e065fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.158520] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 619.158520] env[68638]: value = "task-2833094" [ 619.158520] env[68638]: _type = "Task" [ 619.158520] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.173012] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.228906] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833092, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.289203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.289878] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.295862] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.071s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.299067] env[68638]: INFO nova.compute.claims [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.306350] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.306896] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.306896] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.307936] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.307936] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.307936] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.307936] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.307936] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 619.308191] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.308419] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.308888] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.317344] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f5ddb43-c547-458e-a6cb-17ecac5f4f1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.344550] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833090, 
'name': CopyVirtualDisk_Task, 'duration_secs': 0.517105} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.346390] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a5dedd3e-a544-4005-bc9b-0735267d6753/a5dedd3e-a544-4005-bc9b-0735267d6753.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.346667] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.347037] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 619.347037] env[68638]: value = "task-2833095" [ 619.347037] env[68638]: _type = "Task" [ 619.347037] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.348990] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-475b7fbe-4557-4f13-ac8a-2abb74a0b30a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.362569] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833095, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.363217] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 619.363217] env[68638]: value = "task-2833096" [ 619.363217] env[68638]: _type = "Task" [ 619.363217] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.372438] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833096, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.658480] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.664416] env[68638]: INFO nova.compute.manager [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Took 26.26 seconds to build instance. [ 619.673319] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096659} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.673591] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 619.674184] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 619.674184] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 619.727784] env[68638]: DEBUG oslo_vmware.api [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833092, 'name': RemoveSnapshot_Task, 'duration_secs': 0.995367} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.728443] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 619.819897] env[68638]: DEBUG nova.compute.utils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 619.825018] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 619.825018] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.862066] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833095, 'name': ReconfigVM_Task, 'duration_secs': 0.344441} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.862432] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 619.880326] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217412} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.880326] env[68638]: DEBUG nova.network.neutron [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updated VIF entry in instance network info cache for port 173c13c1-a5ec-4a6b-98d8-e039626a047e. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.880711] env[68638]: DEBUG nova.network.neutron [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updating instance_info_cache with network_info: [{"id": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "address": "fa:16:3e:df:e0:03", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap173c13c1-a5", "ovs_interfaceid": "173c13c1-a5ec-4a6b-98d8-e039626a047e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.881098] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 619.884867] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e501b831-6bd1-4011-bc6d-3cf0f51de01c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.915162] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] a5dedd3e-a544-4005-bc9b-0735267d6753/a5dedd3e-a544-4005-bc9b-0735267d6753.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 619.916846] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87ad2906-7908-4092-9e3d-f9a91daff86e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.940890] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 619.940890] env[68638]: value = "task-2833097" [ 619.940890] env[68638]: _type = "Task" [ 619.940890] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.950356] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833097, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.060154] env[68638]: DEBUG nova.policy [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6003cfe294fd431e9bafa9a892e709af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66dfb5ac98d24239a85fb5dc6ed239fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.168881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-563cb38c-9eae-4d0d-9fdf-d4ba242773c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8f841b29-0156-414e-8467-c9a9393cdae9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.774s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 620.235410] env[68638]: WARNING nova.compute.manager [None req-4c278f28-1b74-4b00-9cd1-150bc3d9dcf4 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Image not found during snapshot: nova.exception.ImageNotFound: Image 4eb37f28-a930-45b2-9458-d6e77436b6ef could not be found. [ 620.327415] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.373484] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 620.373751] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.373901] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 620.374148] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.374314] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 620.374389] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 620.374576] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 620.374998] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 620.374998] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 620.384419] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 620.385691] env[68638]: DEBUG nova.virt.hardware [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 620.394513] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 620.399988] env[68638]: DEBUG oslo_concurrency.lockutils [req-88b67ee7-e9f0-47d0-bc9e-e9758ae49809 req-545462f4-6542-4045-9e3b-45c0d92f1e70 service nova] Releasing lock "refresh_cache-4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.400527] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ecf412f-6ea7-42ea-bf0d-01e0253e4059 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.422898] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 620.422898] env[68638]: value = "task-2833098" [ 620.422898] env[68638]: _type = "Task" [ 620.422898] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.433063] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833098, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.455667] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833097, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.617909] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Successfully updated port: c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 620.670811] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 620.696884] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updated VIF entry in instance network info cache for port 93a70134-a907-41d4-bce1-2bf7496b23bb. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 620.697319] env[68638]: DEBUG nova.network.neutron [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating instance_info_cache with network_info: [{"id": "93a70134-a907-41d4-bce1-2bf7496b23bb", "address": "fa:16:3e:d9:a0:eb", "network": {"id": "09a58246-fcd1-46a7-b760-d6ab4d363cd6", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1998833156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e4d1720c32b4e559739d6cbc868a0f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b5f9472-1844-4c99-8804-8f193cfff562", "external-id": "nsx-vlan-transportzone-445", "segmentation_id": 445, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93a70134-a9", "ovs_interfaceid": "93a70134-a907-41d4-bce1-2bf7496b23bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.732112] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 620.735345] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.735534] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 620.735690] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.735834] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 620.735976] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 620.738339] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 620.738520] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 620.738939] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 620.738939] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 620.739069] env[68638]: DEBUG nova.virt.hardware [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 
tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 620.740661] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c9b237-a2ec-48d7-b6f3-4f2f4f29f18c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.757109] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf538e3-70fd-4c21-b1d9-cda74f30f534 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.781512] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.786404] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.789807] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.791972] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9561789-0558-447a-94c2-c4636f8113e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.817292] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.817292] env[68638]: value = "task-2833099" [ 620.817292] env[68638]: _type = "Task" [ 620.817292] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.825865] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833099, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.932643] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833098, 'name': ReconfigVM_Task, 'duration_secs': 0.26709} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.932945] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 620.933782] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e430aaae-c270-47de-9457-179273c21c6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.962362] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.965915] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebc54f07-174f-44ff-ace6-d31aa3334b8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.991588] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833097, 'name': ReconfigVM_Task, 'duration_secs': 0.573564} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.996414] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Reconfigured VM instance instance-00000010 to attach disk [datastore1] a5dedd3e-a544-4005-bc9b-0735267d6753/a5dedd3e-a544-4005-bc9b-0735267d6753.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 620.997353] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 620.997353] env[68638]: value = "task-2833100" [ 620.997353] env[68638]: _type = "Task" [ 620.997353] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.997988] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6a647e3-e0fb-42ca-962e-4d95c9c5d45a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.009642] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833100, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.012050] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 621.012050] env[68638]: value = "task-2833101" [ 621.012050] env[68638]: _type = "Task" [ 621.012050] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.033670] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833101, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.087320] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Successfully created port: 1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.121238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.121238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.121238] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.145231] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802739af-f679-42f0-9652-61565c45f506 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.160829] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d7e9f0-73bd-460b-be4b-7dfb80648045 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.200175] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f4f583-2ba8-4f03-9046-569779a153e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.205555] env[68638]: DEBUG oslo_concurrency.lockutils [req-b10035c0-0406-4d64-9827-ac731de76d84 req-2ccf61ff-0e3b-4501-bbe4-eafb3991d34e service nova] Releasing lock "refresh_cache-ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.212054] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a58e8c0-b187-4809-907c-7e251b88584a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.232869] env[68638]: DEBUG nova.compute.provider_tree [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.237393] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.305563] env[68638]: DEBUG nova.compute.manager [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Received event network-vif-plugged-c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 621.306812] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Acquiring lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.307823] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.307823] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.307823] env[68638]: DEBUG nova.compute.manager [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] No waiting events found dispatching network-vif-plugged-c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 621.307823] env[68638]: WARNING nova.compute.manager [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Received unexpected event network-vif-plugged-c94367eb-4dac-4137-92b7-00d32ad0be7c for instance with vm_state building and task_state spawning. 
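Annotation: the entries above show the compute manager's external-event handling. A per-instance "<uuid>-events" lock is taken around pop_instance_event, and because no waiter had been registered for network-vif-plugged-c94367eb-4dac-4137-92b7-00d32ad0be7c, the event is logged as unexpected while the instance is still in vm_state building / task_state spawning. A minimal sketch of that register/pop/dispatch pattern, using only the standard library and hypothetical names (this is not Nova's actual implementation), could look like:

    # Illustrative sketch only -- mimics the pattern in the log above: a per-instance
    # "<uuid>-events" lock guards a table of waited-for events; an incoming external
    # event (e.g. network-vif-plugged-<port>) either wakes a waiter or is reported
    # as unexpected. All names here are hypothetical.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()        # stands in for the "<uuid>-events" lock
            self._events = defaultdict(dict)     # instance_uuid -> {event_name: threading.Event}

        def prepare_for_event(self, instance_uuid, event_name):
            """Register interest in an event before triggering the external action."""
            waiter = threading.Event()
            with self._lock:
                self._events[instance_uuid][event_name] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            """Remove and return the waiter for an event, or None if nobody is waiting."""
            with self._lock:
                return self._events[instance_uuid].pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            """Called when an external event (e.g. from Neutron) arrives."""
            waiter = self.pop_event(instance_uuid, event_name)
            if waiter is None:
                # corresponds to the WARNING above: "Received unexpected event ..."
                print(f"unexpected event {event_name} for instance {instance_uuid}")
            else:
                waiter.set()

    # usage sketch:
    #   waiter = events.prepare_for_event(uuid, "network-vif-plugged-<port>")
    #   ... plug the VIF ...
    #   waiter.wait(timeout=300)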
[ 621.308175] env[68638]: DEBUG nova.compute.manager [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Received event network-changed-c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 621.308175] env[68638]: DEBUG nova.compute.manager [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Refreshing instance network info cache due to event network-changed-c94367eb-4dac-4137-92b7-00d32ad0be7c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 621.308337] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Acquiring lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.331114] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833099, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.341378] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 621.378798] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 621.379353] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.379353] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 621.379485] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Flavor 
pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.379700] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 621.379795] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 621.379999] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 621.380510] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 621.380659] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 621.381787] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 621.381787] env[68638]: DEBUG nova.virt.hardware [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 621.382652] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443e18a1-a0fd-4d5e-bcf7-dd6818b1b536 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.393264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959d8095-f61b-46d3-a0c7-657756ba59e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.510942] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833100, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.521796] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "f767af17-f2bb-461d-9e7f-9c62b5504257" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.522089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.522286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.522458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.522649] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.530209] env[68638]: INFO nova.compute.manager [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Terminating instance [ 621.539459] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833101, 'name': Rename_Task, 'duration_secs': 0.248488} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.542457] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 621.542457] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96bed758-04e3-4883-b619-f1ca1b311bc9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.549481] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 621.549481] env[68638]: value = "task-2833102" [ 621.549481] env[68638]: _type = "Task" [ 621.549481] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.561317] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833102, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.691899] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 621.737683] env[68638]: DEBUG nova.scheduler.client.report [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.830829] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833099, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.925488] env[68638]: DEBUG nova.network.neutron [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updating instance_info_cache with network_info: [{"id": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "address": "fa:16:3e:4e:e1:ee", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc94367eb-4d", "ovs_interfaceid": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.013135] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833100, 'name': ReconfigVM_Task, 'duration_secs': 1.006634} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.013135] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b/7617a7b1-3b21-4d38-b090-1d35bc74637b.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.013407] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 622.043073] env[68638]: DEBUG nova.compute.manager [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.043324] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.044310] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba67a5d-3986-4c3e-9886-f9530e59a4f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.058277] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.061559] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d73942e-9ea7-4a7d-aca0-0f8e862357dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.069646] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833102, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.071851] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 622.071851] env[68638]: value = "task-2833103" [ 622.071851] env[68638]: _type = "Task" [ 622.071851] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.081870] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833103, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.131419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "f43dae1e-3442-450a-b9e8-3884504a2b38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.131899] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.242667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.947s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.243311] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.247979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.283s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.247979] env[68638]: DEBUG nova.objects.instance [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lazy-loading 'resources' on Instance uuid 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 622.268286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8f841b29-0156-414e-8467-c9a9393cdae9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.268286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8f841b29-0156-414e-8467-c9a9393cdae9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.268286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.268286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.268577] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8f841b29-0156-414e-8467-c9a9393cdae9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.275970] env[68638]: INFO nova.compute.manager [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Terminating instance [ 622.330741] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833099, 'name': CreateVM_Task, 
'duration_secs': 1.076637} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.330911] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.331355] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.331560] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.331843] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 622.332109] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9086d6-f16b-4d6c-a444-27ed24143cd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.337113] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 622.337113] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525f1f02-ab18-20ff-327d-5d6df14097b4" [ 622.337113] env[68638]: _type = "Task" [ 622.337113] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.348471] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525f1f02-ab18-20ff-327d-5d6df14097b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.435022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.435022] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Instance network_info: |[{"id": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "address": "fa:16:3e:4e:e1:ee", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc94367eb-4d", "ovs_interfaceid": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 622.435296] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Acquired lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.435296] env[68638]: DEBUG nova.network.neutron [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Refreshing network info cache for port c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 622.435296] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:e1:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c94367eb-4dac-4137-92b7-00d32ad0be7c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.443414] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 
tempest-ListServerFiltersTestJSON-225846415-project-member] Creating folder: Project (373459ee626847e9886e5ff353729280). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.447734] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4496d0bf-3bbc-4a1d-8861-a7dfebefc21a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.460296] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Created folder: Project (373459ee626847e9886e5ff353729280) in parent group-v569734. [ 622.460787] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Creating folder: Instances. Parent ref: group-v569785. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 622.461176] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b6ff209-ad0f-440f-ac1b-1c114ec2d8bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.470070] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Created folder: Instances in parent group-v569785. [ 622.470517] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 622.472019] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.472019] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6dd0bb8-6491-4836-a3b4-92699d048ebb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.492386] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.492386] env[68638]: value = "task-2833106" [ 622.492386] env[68638]: _type = "Task" [ 622.492386] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.501405] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833106, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.522168] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99eda761-bcf6-46f0-9aaf-ce0d2e726d77 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.540484] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82aa123d-7ab7-41a3-9151-8494bc36d37d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.558756] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 622.570920] env[68638]: DEBUG oslo_vmware.api [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833102, 'name': PowerOnVM_Task, 'duration_secs': 0.654225} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.571178] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 622.571464] env[68638]: INFO nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 9.68 seconds to spawn the instance on the hypervisor. [ 622.571578] env[68638]: DEBUG nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.572426] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7d7eae-652e-4fa4-bcf3-6bdff13eb1db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.590200] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833103, 'name': PowerOffVM_Task, 'duration_secs': 0.316678} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.590740] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 622.591012] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 622.591549] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b2fdd82-fb03-4814-806e-7af4439b62bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.651139] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 622.651662] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 622.651769] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleting the datastore file [datastore2] f767af17-f2bb-461d-9e7f-9c62b5504257 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 622.652018] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c278eff2-2744-4542-9037-c5744977d3b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.658492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.658492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.658492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 
tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.658492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.658853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.662534] env[68638]: INFO nova.compute.manager [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Terminating instance [ 622.669092] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 622.669092] env[68638]: value = "task-2833108" [ 622.669092] env[68638]: _type = "Task" [ 622.669092] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.681663] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.752161] env[68638]: DEBUG nova.compute.utils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.753787] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 622.754031] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.769990] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.770176] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.783030] env[68638]: DEBUG nova.compute.manager [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.783289] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.784587] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac87fcd3-d0bd-4155-aaab-d4e16e81063b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.792860] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.793165] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b36dbadb-657e-4361-8707-a80257d0c118 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.806880] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 622.806880] env[68638]: value = "task-2833109" [ 622.806880] env[68638]: _type = "Task" [ 622.806880] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.816536] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833109, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.849249] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525f1f02-ab18-20ff-327d-5d6df14097b4, 'name': SearchDatastore_Task, 'duration_secs': 0.014521} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.849689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.850458] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.851131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.851329] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.851653] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.851989] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b191429b-7955-47cc-9509-c62da43301c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.855912] env[68638]: DEBUG nova.policy [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b38f7816ced4b019a472fd7f9d065cb', 'user_domain_id': 'default', 
'system_scope': None, 'domain_id': None, 'project_id': '482784eedfe64606b2a9b86e31f0a20f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.869246] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.869478] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.871156] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d8aa567-a062-453f-8968-2b905cbf6635 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.880728] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 622.880728] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f0ffc2-e6ed-e87f-1ba1-6f1c16cc93a8" [ 622.880728] env[68638]: _type = "Task" [ 622.880728] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.890407] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0ffc2-e6ed-e87f-1ba1-6f1c16cc93a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.894946] env[68638]: DEBUG nova.network.neutron [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updated VIF entry in instance network info cache for port c94367eb-4dac-4137-92b7-00d32ad0be7c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 622.895433] env[68638]: DEBUG nova.network.neutron [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updating instance_info_cache with network_info: [{"id": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "address": "fa:16:3e:4e:e1:ee", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc94367eb-4d", "ovs_interfaceid": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.932804] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.932804] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.004117] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833106, 'name': CreateVM_Task, 'duration_secs': 0.384306} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.004117] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.005015] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.005015] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.005318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 623.005399] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db5fe4e0-20b7-447d-aa73-6b6cfbb2a138 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.008824] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Successfully updated port: 1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.013057] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 623.013057] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521b092e-8694-e876-c672-d7cca129b552" [ 623.013057] env[68638]: _type = "Task" [ 623.013057] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.023026] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521b092e-8694-e876-c672-d7cca129b552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.104333] env[68638]: INFO nova.compute.manager [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 27.67 seconds to build instance. 
[ 623.137381] env[68638]: DEBUG nova.network.neutron [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Port 41ce015b-dfb7-4031-a11b-8dfd0e29bb62 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 623.168818] env[68638]: DEBUG nova.compute.manager [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 623.169059] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.170500] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82bdac3-1f93-4a1a-941e-3d30177c197f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.178980] env[68638]: DEBUG nova.compute.manager [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Received event network-vif-plugged-1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 623.179226] env[68638]: DEBUG oslo_concurrency.lockutils [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] Acquiring lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.179440] env[68638]: DEBUG oslo_concurrency.lockutils [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] Lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.179638] env[68638]: DEBUG oslo_concurrency.lockutils [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] Lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.179822] env[68638]: DEBUG nova.compute.manager [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] No waiting events found dispatching network-vif-plugged-1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 623.180014] env[68638]: WARNING nova.compute.manager [req-a1c5acfc-a9d5-439e-8b53-8d07e4f19fee req-74be7c1b-a655-446e-9dfd-bd2bd1c65068 service nova] 
[instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Received unexpected event network-vif-plugged-1125034b-dba0-498d-89a1-db63e8fd03ad for instance with vm_state building and task_state spawning. [ 623.190098] env[68638]: DEBUG oslo_vmware.api [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220489} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.190098] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.190098] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.190098] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.190098] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.190444] env[68638]: INFO nova.compute.manager [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Took 1.15 seconds to destroy the instance on the hypervisor. [ 623.190444] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.190717] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa098143-a5df-4432-91ca-e603aee60fee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.192501] env[68638]: DEBUG nova.compute.manager [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.192629] env[68638]: DEBUG nova.network.neutron [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.201028] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 623.201028] env[68638]: value = "task-2833110" [ 623.201028] env[68638]: _type = "Task" [ 623.201028] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.213097] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.260598] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.323253] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833109, 'name': PowerOffVM_Task, 'duration_secs': 0.407264} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.323253] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.323253] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.323253] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86dcddc6-d6c4-4a6a-a0a1-f6a7e0140754 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.363436] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Successfully created port: a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.390178] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.390410] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.390584] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleting the datastore file [datastore2] 8f841b29-0156-414e-8467-c9a9393cdae9 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.391366] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0ba9f2c-3d5a-4ed3-8d10-e1357b8b2c3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.400100] env[68638]: DEBUG oslo_concurrency.lockutils [req-77fee217-1ab4-4c5b-a69f-c1b134e2e435 req-9f3b4b91-09b6-4cac-bc5c-2eca9d0bdce6 service nova] Releasing lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.401177] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0ffc2-e6ed-e87f-1ba1-6f1c16cc93a8, 'name': SearchDatastore_Task, 'duration_secs': 0.017937} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.402944] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 623.402944] env[68638]: value = "task-2833112" [ 623.402944] env[68638]: _type = "Task" [ 623.402944] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.402944] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7296f626-a857-461a-877d-3b1facd24325 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.420243] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833112, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.420243] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 623.420243] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d89ee6-7f60-ea0d-ec8b-092cd5e592bc" [ 623.420243] env[68638]: _type = "Task" [ 623.420243] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.432369] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d89ee6-7f60-ea0d-ec8b-092cd5e592bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.449894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c82a459-b459-4a88-84c8-7d9d34b21e5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.458031] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3038f7-cdc1-4555-bcaa-edca917b1c04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.492222] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ed8339-0189-43f9-8753-7ba4340845b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.499894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8612db0-a988-4be2-8990-3012564a15b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.514820] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.514962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquired lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.515126] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.516587] env[68638]: DEBUG nova.compute.provider_tree [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.526779] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521b092e-8694-e876-c672-d7cca129b552, 'name': SearchDatastore_Task, 'duration_secs': 0.021549} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.527079] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.527306] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.527529] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.607211] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4d8793d-5337-4ec5-88a2-dc4c5881452a tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.180s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.710889] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833110, 'name': PowerOffVM_Task, 'duration_secs': 0.210838} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.711189] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.711352] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.711629] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60afdde0-e8ba-4405-978c-75aaa3e16df8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.780214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.780214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.780214] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleting the datastore file [datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.780214] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ea64ee0-ebaa-4283-a96b-ba49eac792ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.786253] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 623.786253] env[68638]: value = "task-2833114" [ 623.786253] env[68638]: _type = "Task" [ 623.786253] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.797343] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.915666] env[68638]: DEBUG oslo_vmware.api [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833112, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228524} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.915666] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.915850] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.915996] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.916352] env[68638]: INFO nova.compute.manager [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 623.918591] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.918591] env[68638]: DEBUG nova.compute.manager [-] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.918591] env[68638]: DEBUG nova.network.neutron [-] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.927577] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d89ee6-7f60-ea0d-ec8b-092cd5e592bc, 'name': SearchDatastore_Task, 'duration_secs': 0.015522} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.927833] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.928194] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.928391] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.928941] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.928941] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66fc9a83-8f37-41ec-99ee-28d4255471eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.931053] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d8cc4b7-d835-4c5f-917a-529600443864 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.936237] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 623.936237] env[68638]: value = "task-2833115" [ 623.936237] env[68638]: _type = "Task" [ 623.936237] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.940052] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.940242] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.941251] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6abf3c4-64d0-4a7f-b71d-230aac589530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.946263] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.949141] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 623.949141] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5256db52-8d71-adea-dcc4-9c44feb8c3e6" [ 623.949141] env[68638]: _type = "Task" [ 623.949141] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.958886] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5256db52-8d71-adea-dcc4-9c44feb8c3e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.996472] env[68638]: DEBUG nova.network.neutron [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.022985] env[68638]: DEBUG nova.scheduler.client.report [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.074312] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.110564] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 624.172674] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.172945] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.173841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.274353] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.305388] env[68638]: DEBUG oslo_vmware.api [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136838} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.305665] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.305902] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.306065] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.306249] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.306388] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.306538] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.306749] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.306899] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
624.307087] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.307254] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.307423] env[68638]: DEBUG nova.virt.hardware [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.307743] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.307899] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.308078] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.308267] env[68638]: INFO nova.compute.manager [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Took 1.14 seconds to destroy the instance on the hypervisor. [ 624.308467] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.309605] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80270f2-ce3e-499f-a1e4-d3e55db7c4dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.312914] env[68638]: DEBUG nova.compute.manager [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.313044] env[68638]: DEBUG nova.network.neutron [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.321281] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b2f240-5d8f-4fec-9fef-aaca83dac680 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.380223] env[68638]: DEBUG nova.network.neutron [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Updating instance_info_cache with network_info: [{"id": "1125034b-dba0-498d-89a1-db63e8fd03ad", "address": "fa:16:3e:07:b3:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1125034b-db", "ovs_interfaceid": "1125034b-dba0-498d-89a1-db63e8fd03ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.447384] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833115, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.459242] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5256db52-8d71-adea-dcc4-9c44feb8c3e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008233} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.460193] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c047a92d-2a51-41d2-b588-706c5fa46789 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.466507] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 624.466507] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523ba475-c670-3041-c3bc-3a7eab506249" [ 624.466507] env[68638]: _type = "Task" [ 624.466507] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.474431] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523ba475-c670-3041-c3bc-3a7eab506249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.499110] env[68638]: INFO nova.compute.manager [-] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Took 1.31 seconds to deallocate network for instance. [ 624.528596] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.281s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.531339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.784s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.534292] env[68638]: INFO nova.compute.claims [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.575482] env[68638]: INFO nova.scheduler.client.report [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Deleted allocations for instance 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc [ 624.645542] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.782110] env[68638]: DEBUG nova.network.neutron [-] [instance: 
8f841b29-0156-414e-8467-c9a9393cdae9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.883275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Releasing lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.883610] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Instance network_info: |[{"id": "1125034b-dba0-498d-89a1-db63e8fd03ad", "address": "fa:16:3e:07:b3:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1125034b-db", "ovs_interfaceid": "1125034b-dba0-498d-89a1-db63e8fd03ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.884040] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:b3:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1125034b-dba0-498d-89a1-db63e8fd03ad', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.891877] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Creating folder: Project (66dfb5ac98d24239a85fb5dc6ed239fe). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.892205] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-706c9a85-633c-4a74-b254-c707a05c0ca5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.903664] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Created folder: Project (66dfb5ac98d24239a85fb5dc6ed239fe) in parent group-v569734. [ 624.904012] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Creating folder: Instances. Parent ref: group-v569788. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.904434] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8264e1d2-6217-482c-b3d6-523f209230c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.913038] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Created folder: Instances in parent group-v569788. [ 624.913038] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.913294] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.913374] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cb7bf0e-594d-430a-b793-e3a31f560145 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.934540] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.934540] env[68638]: value = "task-2833118" [ 624.934540] env[68638]: _type = "Task" [ 624.934540] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.945887] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833118, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.949505] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833115, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534771} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.949772] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.949961] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.950312] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6de62f81-63e3-46af-a341-ebdd6bc8b5f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.957761] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 624.957761] env[68638]: value = "task-2833119" [ 624.957761] env[68638]: _type = "Task" [ 624.957761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.967458] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833119, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.976028] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523ba475-c670-3041-c3bc-3a7eab506249, 'name': SearchDatastore_Task, 'duration_secs': 0.014508} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.977048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.977048] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1946baab-bb48-4138-8db6-1f530e432c3d/1946baab-bb48-4138-8db6-1f530e432c3d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.977048] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a455692-eac8-40f0-bf5c-8d8859680c21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.982443] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 624.982443] env[68638]: value = "task-2833120" [ 624.982443] env[68638]: _type = "Task" [ 624.982443] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.992268] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833120, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.007122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.087072] env[68638]: DEBUG oslo_concurrency.lockutils [None req-408c82a9-315f-4230-ae31-a483014514ad tempest-ServerDiagnosticsV248Test-1624483098 tempest-ServerDiagnosticsV248Test-1624483098-project-member] Lock "4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.458s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.170852] env[68638]: DEBUG nova.compute.manager [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Received event network-vif-plugged-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 625.171101] env[68638]: DEBUG oslo_concurrency.lockutils [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] Acquiring lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.171329] env[68638]: DEBUG oslo_concurrency.lockutils [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.171562] env[68638]: DEBUG oslo_concurrency.lockutils [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.171878] env[68638]: DEBUG nova.compute.manager [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] No waiting events found dispatching network-vif-plugged-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 625.171878] env[68638]: WARNING nova.compute.manager [req-1fd566af-8ac9-4089-925a-401798385690 req-79f1f940-c354-438c-b9a0-f6b183cef34c service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Received unexpected event network-vif-plugged-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd for instance with vm_state building and task_state spawning. 
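The trace above repeats one oslo.vmware pattern over and over: a *_Task method is invoked through the session (the request_handler entries), then wait_for_task() blocks while _poll_task logs "progress is N%" until the task completes successfully. A minimal sketch of that pattern, written against the public oslo.vmware API rather than Nova's wrappers, follows; the vCenter host and credentials are placeholders, the datacenter argument is elided, and keyword names may differ slightly between oslo.vmware releases.

    # Hypothetical sketch of the submit-then-poll pattern visible in this log.
    # All connection values are placeholders, not taken from this deployment.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vc.example.test',            # placeholder vCenter endpoint
        server_username='user@example',    # placeholder credentials
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)            # seconds between _poll_task rounds

    # Kick off a datastore file deletion, the same kind of call that produced
    # the FileManager.DeleteDatastoreFile_Task / task-2833114 entries above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] 6cb1846a-02aa-4dc3-a573-858abf5a0bdf',
        datacenter=None)                   # a real call passes the Datacenter moref

    # wait_for_task() is what emits the "Waiting for the task ... progress is
    # N% ... completed successfully" sequence; it raises on task failure and
    # returns the task info on success.
    task_info = session.wait_for_task(task)
    print(task_info.state)

Nova's vmwareapi code drives roughly the same loop through its own session wrapper (vm_util and ds_util go via _call_method and _wait_for_task), which is why every task in this log is bracketed by a "Waiting for the task" entry and a "completed successfully" entry.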
[ 625.210245] env[68638]: DEBUG nova.network.neutron [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.216410] env[68638]: DEBUG nova.compute.manager [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 625.217418] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d320b3-7869-46a5-92f7-1963282901ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.237703] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Received event network-changed-1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 625.237959] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Refreshing instance network info cache due to event network-changed-1125034b-dba0-498d-89a1-db63e8fd03ad. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 625.238232] env[68638]: DEBUG oslo_concurrency.lockutils [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] Acquiring lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.238409] env[68638]: DEBUG oslo_concurrency.lockutils [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] Acquired lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.238613] env[68638]: DEBUG nova.network.neutron [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Refreshing network info cache for port 1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.250240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.250925] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.251230] env[68638]: DEBUG nova.network.neutron [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 
tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.285348] env[68638]: INFO nova.compute.manager [-] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Took 1.37 seconds to deallocate network for instance. [ 625.341690] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Successfully updated port: a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 625.448235] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833118, 'name': CreateVM_Task, 'duration_secs': 0.479197} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.448440] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.449277] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.449474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.449914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.450295] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a64c44b-9bb0-499b-8a6f-79fb91ac9a4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.456697] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 625.456697] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52327bba-7029-9edf-8c6c-617861aaa1d4" [ 625.456697] env[68638]: _type = "Task" [ 625.456697] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.469913] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07381} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.475233] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 625.475612] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52327bba-7029-9edf-8c6c-617861aaa1d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.476513] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe1967f-637c-446b-8c4c-aaff8c211f4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.504376] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 625.508662] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5d5dfa1-75f4-4c4b-991c-e0f7ffb6b598 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.532786] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833120, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.534547] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 625.534547] env[68638]: value = "task-2833121" [ 625.534547] env[68638]: _type = "Task" [ 625.534547] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.548084] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833121, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.714459] env[68638]: INFO nova.compute.manager [-] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Took 1.40 seconds to deallocate network for instance. [ 625.742498] env[68638]: INFO nova.compute.manager [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] instance snapshotting [ 625.747803] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ef6c27-64e8-4649-95b5-9dfd25739541 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.777063] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a2ff0f-91b8-4b72-a6dd-7e60e744d5ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.796035] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.846731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.846731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquired lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.847734] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.966974] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52327bba-7029-9edf-8c6c-617861aaa1d4, 'name': SearchDatastore_Task, 'duration_secs': 0.050409} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.969923] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.970340] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.970637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.970821] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.971087] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.973612] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-241f9719-f883-49d9-85e0-ed5ca9235818 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.983982] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.984213] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 625.985288] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03a845aa-0724-4743-932b-584923a47946 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.990846] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 625.990846] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a9351e-a978-fc1c-2535-88579e8e26a6" [ 625.990846] env[68638]: _type = "Task" [ 625.990846] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.005434] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a9351e-a978-fc1c-2535-88579e8e26a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010218} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.011134] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584182} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.011834] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c86144a-06ea-4f85-9e9f-17169d819da0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.014074] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1946baab-bb48-4138-8db6-1f530e432c3d/1946baab-bb48-4138-8db6-1f530e432c3d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.014300] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.014525] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02081781-6b28-4d8d-b71d-05de2b39c8aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.021210] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 626.021210] env[68638]: value = 
"session[5267461d-1849-2a3b-78fe-5543790e1404]52fda26d-1c80-7537-47da-e578e585a411" [ 626.021210] env[68638]: _type = "Task" [ 626.021210] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.026867] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 626.026867] env[68638]: value = "task-2833122" [ 626.026867] env[68638]: _type = "Task" [ 626.026867] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.041656] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fda26d-1c80-7537-47da-e578e585a411, 'name': SearchDatastore_Task, 'duration_secs': 0.010331} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.049103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.049103] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 421c377f-0b7a-457d-b5dd-50281c65122a/421c377f-0b7a-457d-b5dd-50281c65122a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.051574] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae92c46b-8bf0-477f-a864-ec9c99129e0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.054082] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833122, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.061883] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833121, 'name': ReconfigVM_Task, 'duration_secs': 0.300501} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.062670] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 626.062670] env[68638]: value = "task-2833123" [ 626.062670] env[68638]: _type = "Task" [ 626.062670] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.062670] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Reconfigured VM instance instance-0000000a to attach disk [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d/c71693e9-aeaa-4f12-b5cf-a179e558505d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.063177] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-341e5db7-ba5c-4b2b-a2c8-b816235e4e4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.075965] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.077422] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 626.077422] env[68638]: value = "task-2833124" [ 626.077422] env[68638]: _type = "Task" [ 626.077422] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.085712] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833124, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.121823] env[68638]: DEBUG nova.network.neutron [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Updated VIF entry in instance network info cache for port 1125034b-dba0-498d-89a1-db63e8fd03ad. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.121984] env[68638]: DEBUG nova.network.neutron [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Updating instance_info_cache with network_info: [{"id": "1125034b-dba0-498d-89a1-db63e8fd03ad", "address": "fa:16:3e:07:b3:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1125034b-db", "ovs_interfaceid": "1125034b-dba0-498d-89a1-db63e8fd03ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.158156] env[68638]: DEBUG nova.network.neutron [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.166766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5951c7d7-3b0d-4665-a0e9-272485fe7a17 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.178179] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75de904-9aa0-4cad-8d66-efe73c0ab57f {{(pid=68638) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.211120] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a07b81-55d1-4546-acd2-a5d395d04f90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.219364] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c137b6-aac4-42d6-8a33-875ec7973b36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.224759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.235440] env[68638]: DEBUG nova.compute.provider_tree [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.291927] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 626.291927] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0a9b7165-3dc7-475b-99ca-01c785094ac9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.297918] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 626.297918] env[68638]: value = "task-2833125" [ 626.297918] env[68638]: _type = "Task" [ 626.297918] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.308381] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833125, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.434611] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.537524] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833122, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078547} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.540540] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.541556] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8078c5d2-6844-48f7-a665-23e966fd0d91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.572969] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1946baab-bb48-4138-8db6-1f530e432c3d/1946baab-bb48-4138-8db6-1f530e432c3d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.572969] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ed81e51-ac3e-406d-805c-0de8161290ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.607892] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509564} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.613013] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 421c377f-0b7a-457d-b5dd-50281c65122a/421c377f-0b7a-457d-b5dd-50281c65122a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.613280] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.613770] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 626.613770] env[68638]: value = "task-2833126" [ 626.613770] env[68638]: _type = "Task" [ 626.613770] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.613983] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833124, 'name': Rename_Task, 'duration_secs': 0.139454} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.615113] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7b89132-ab29-4ca8-9c6d-998a8026ae98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.616387] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.620508] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67f5908d-f5f2-406f-89cd-104a190169b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.627630] env[68638]: DEBUG oslo_concurrency.lockutils [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] Releasing lock "refresh_cache-421c377f-0b7a-457d-b5dd-50281c65122a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.627888] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Received event network-vif-deleted-951987e2-f8ec-4ab6-a168-7db5fd4bb37c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 626.628498] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde 
req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Received event network-vif-deleted-5ee0473a-5eb7-4b42-b970-cb92565f8dd5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 626.628498] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Received event network-vif-deleted-cdd18e96-0a08-4bc0-9252-0044e54e0084 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 626.628498] env[68638]: INFO nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Neutron deleted interface cdd18e96-0a08-4bc0-9252-0044e54e0084; detaching it from the instance and deleting it from the info cache [ 626.628745] env[68638]: DEBUG nova.network.neutron [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.630842] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833126, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.633442] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Waiting for the task: (returnval){ [ 626.633442] env[68638]: value = "task-2833128" [ 626.633442] env[68638]: _type = "Task" [ 626.633442] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.635602] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 626.635602] env[68638]: value = "task-2833127" [ 626.635602] env[68638]: _type = "Task" [ 626.635602] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.651683] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833127, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.651946] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833128, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.655844] env[68638]: DEBUG nova.network.neutron [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Updating instance_info_cache with network_info: [{"id": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "address": "fa:16:3e:6a:4b:d0", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4d5833f-aa", "ovs_interfaceid": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.661405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.738571] env[68638]: DEBUG nova.scheduler.client.report [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.808195] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833125, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.126224] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833126, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.132292] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f7bfbb3-8fb2-482c-94b3-83aefd653d54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.149750] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459d11bc-ec43-4345-8f6d-53f1cda4807b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.170740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Releasing lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.171066] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Instance network_info: |[{"id": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "address": "fa:16:3e:6a:4b:d0", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4d5833f-aa", "ovs_interfaceid": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 627.179185] env[68638]: DEBUG oslo_vmware.api [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Task: {'id': task-2833128, 'name': PowerOnVM_Task, 'duration_secs': 0.439399} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.179185] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833127, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073119} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.179185] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:4b:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4d5833f-aab1-4c4d-9651-ab0440b5d6dd', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 627.190478] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Creating folder: Project (482784eedfe64606b2a9b86e31f0a20f). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.191962] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.191962] env[68638]: DEBUG nova.compute.manager [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.191962] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 627.197109] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6289fc48-79b4-4a7b-966f-785dd6f52e83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.199497] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f376e4e-a59a-4579-8e8e-34a815680e89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.203253] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfe6245-e95c-4523-9617-74f755c642bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.222438] env[68638]: DEBUG nova.compute.manager [req-df6acff5-0fcf-4ed2-916e-c40d51bebcde req-027962c8-2452-4537-9216-fc618df7c109 service nova] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Detach interface failed, port_id=cdd18e96-0a08-4bc0-9252-0044e54e0084, reason: Instance 6cb1846a-02aa-4dc3-a573-858abf5a0bdf could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 627.226170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9947527b-600f-4904-a07d-62cadff368f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.236995] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Created folder: Project (482784eedfe64606b2a9b86e31f0a20f) in parent group-v569734. [ 627.237377] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Creating folder: Instances. Parent ref: group-v569791. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 627.237862] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f637d18-9c6f-4049-9f90-b6b7537bcd60 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.267410] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.736s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.267770] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.283827] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 421c377f-0b7a-457d-b5dd-50281c65122a/421c377f-0b7a-457d-b5dd-50281c65122a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 627.285041] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.393s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.286688] env[68638]: INFO nova.compute.claims [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.289698] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61815d9b-e1f0-4603-ae88-49c22a4e822d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.293065] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc09f696-de62-4761-9e46-51b4874e3b7a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.313027] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Created folder: Instances in parent group-v569791. [ 627.313027] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 627.316230] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 627.319434] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe65bcc-322e-41e7-94f3-80307f73a33d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.334057] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 627.340779] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 627.340779] env[68638]: value = "task-2833131" [ 627.340779] env[68638]: _type = "Task" [ 627.340779] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.350388] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833125, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.351528] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 627.351528] env[68638]: value = "task-2833132" [ 627.351528] env[68638]: _type = "Task" [ 627.351528] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.358870] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.364266] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833132, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.444621] env[68638]: DEBUG nova.compute.manager [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Received event network-changed-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 627.444734] env[68638]: DEBUG nova.compute.manager [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Refreshing instance network info cache due to event network-changed-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 627.445259] env[68638]: DEBUG oslo_concurrency.lockutils [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] Acquiring lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.445259] env[68638]: DEBUG oslo_concurrency.lockutils [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] Acquired lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.445446] env[68638]: DEBUG nova.network.neutron [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Refreshing network info cache for port a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.630894] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833126, 'name': ReconfigVM_Task, 'duration_secs': 0.898783} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.631296] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1946baab-bb48-4138-8db6-1f530e432c3d/1946baab-bb48-4138-8db6-1f530e432c3d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.632019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74387069-0a16-4c8c-8c9e-35674410761b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.639198] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 627.639198] env[68638]: value = "task-2833133" [ 627.639198] env[68638]: _type = "Task" [ 627.639198] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.649330] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833133, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.748281] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.786221] env[68638]: DEBUG nova.compute.utils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 627.787791] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 627.787916] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 627.821197] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833125, 'name': CreateSnapshot_Task, 'duration_secs': 1.228265} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.822067] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 627.822880] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b1c255-f4f5-48b0-8bb4-8569583d23fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.834222] env[68638]: DEBUG nova.policy [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd6d1fa0168446d199644ea8575c2efd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd94444f92bb741739e4b3f8dfb3244b2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 627.843805] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.844417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85a4ca6a-62c7-4680-b588-417e2077e348 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.854985] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833131, 'name': ReconfigVM_Task, 'duration_secs': 0.447822} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.858906] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 421c377f-0b7a-457d-b5dd-50281c65122a/421c377f-0b7a-457d-b5dd-50281c65122a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.859633] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 627.859633] env[68638]: value = "task-2833134" [ 627.859633] env[68638]: _type = "Task" [ 627.859633] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.859827] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-564de00f-4129-48cf-b83c-9abbb9133b8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.869024] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833132, 'name': CreateVM_Task, 'duration_secs': 0.433613} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.869456] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.870153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.870288] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.870593] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.870836] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b17567-b5ed-4f32-b7e1-2a1d4561674d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.877053] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 627.877053] env[68638]: value = "task-2833135" [ 627.877053] env[68638]: _type = "Task" [ 627.877053] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.877264] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833134, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.882423] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 627.882423] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5236e627-7f77-db31-1c9b-00b6a50cb201" [ 627.882423] env[68638]: _type = "Task" [ 627.882423] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.888682] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833135, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.894396] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5236e627-7f77-db31-1c9b-00b6a50cb201, 'name': SearchDatastore_Task, 'duration_secs': 0.009948} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.894695] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.894939] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.895190] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.895333] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.895520] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.895788] env[68638]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-436de9c9-74e7-472b-a919-49247d37e489 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.907301] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.907301] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.907471] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e8cc271-30f5-4149-8b82-f18f37117079 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.912703] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 627.912703] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5227c0e6-ee2b-2a75-d42c-0b907691e690" [ 627.912703] env[68638]: _type = "Task" [ 627.912703] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.921041] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5227c0e6-ee2b-2a75-d42c-0b907691e690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.154136] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833133, 'name': Rename_Task, 'duration_secs': 0.191571} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.154460] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.156928] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d334bc4b-4153-49d2-83b2-34f938f51b38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.163177] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 628.163177] env[68638]: value = "task-2833136" [ 628.163177] env[68638]: _type = "Task" [ 628.163177] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.173982] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833136, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.216528] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Successfully created port: aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.283912] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.284253] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.284551] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "c71693e9-aeaa-4f12-b5cf-a179e558505d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.284798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock 
"c71693e9-aeaa-4f12-b5cf-a179e558505d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.284992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "c71693e9-aeaa-4f12-b5cf-a179e558505d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.290786] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.293908] env[68638]: INFO nova.compute.manager [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Terminating instance [ 628.343484] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 628.345918] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cc3a63c4-965c-433d-8c61-3e89da9afc47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.354251] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 628.354251] env[68638]: value = "task-2833137" [ 628.354251] env[68638]: _type = "Task" [ 628.354251] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.364232] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833137, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.368113] env[68638]: DEBUG nova.network.neutron [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Updated VIF entry in instance network info cache for port a4d5833f-aab1-4c4d-9651-ab0440b5d6dd. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.368443] env[68638]: DEBUG nova.network.neutron [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Updating instance_info_cache with network_info: [{"id": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "address": "fa:16:3e:6a:4b:d0", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4d5833f-aa", "ovs_interfaceid": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.378679] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.388290] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833135, 'name': Rename_Task, 'duration_secs': 0.369168} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.389185] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 628.389438] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02242038-a31b-4777-9b85-dcd2d51ff90d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.396241] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 628.396241] env[68638]: value = "task-2833138" [ 628.396241] env[68638]: _type = "Task" [ 628.396241] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.408157] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.423988] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5227c0e6-ee2b-2a75-d42c-0b907691e690, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.424710] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25bdf7e3-8697-4111-895d-70ecbe7a44f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.431141] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 628.431141] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5210854d-8773-79e0-989f-b904409ca873" [ 628.431141] env[68638]: _type = "Task" [ 628.431141] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.445209] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5210854d-8773-79e0-989f-b904409ca873, 'name': SearchDatastore_Task, 'duration_secs': 0.01081} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.445641] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.446637] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 53571ad6-1fdb-4651-8b4d-24f35ffc815a/53571ad6-1fdb-4651-8b4d-24f35ffc815a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.446637] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7225e33-1ba7-438d-988e-0721b1fce075 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.453665] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 628.453665] env[68638]: value = "task-2833139" [ 628.453665] env[68638]: _type = "Task" [ 628.453665] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.464883] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833139, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.680607] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833136, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.800840] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "refresh_cache-c71693e9-aeaa-4f12-b5cf-a179e558505d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.801047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquired lock "refresh_cache-c71693e9-aeaa-4f12-b5cf-a179e558505d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.801289] env[68638]: DEBUG nova.network.neutron [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 628.865296] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833137, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.874418] env[68638]: DEBUG oslo_vmware.api [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833134, 'name': PowerOnVM_Task, 'duration_secs': 0.647894} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.875740] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.875740] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b76fdb7b-02d5-4fd9-ada7-e6101b31fe64 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance '7617a7b1-3b21-4d38-b090-1d35bc74637b' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 628.881623] env[68638]: DEBUG oslo_concurrency.lockutils [req-b5066998-c7e0-4b31-97eb-9bd03c5f0e90 req-d114ab7b-380b-49db-b151-0854032da5bc service nova] Releasing lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.915881] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833138, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.968804] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833139, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.999761] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2825135-554f-4335-b95a-91382f4d8a3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.010531] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68d7f9c-0630-4f9d-856c-13722b820d95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.066437] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb6ec35-1392-417e-b270-719ff239f33b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.076497] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b3e84b-d360-4177-8685-93468e1058c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.090833] env[68638]: DEBUG nova.compute.provider_tree [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.180179] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833136, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.308121] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.341243] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 629.342017] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.342199] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 629.342592] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.342978] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 629.342978] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 629.343527] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 629.343735] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 
tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 629.343965] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 629.345327] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 629.345327] env[68638]: DEBUG nova.virt.hardware [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 629.345965] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683588ec-1ec4-41cb-a4ff-dbc9839c8816 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.349986] env[68638]: DEBUG nova.network.neutron [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.361207] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68098566-873e-4592-b7e0-6abb55693fe6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.374636] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833137, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.411555] env[68638]: DEBUG oslo_vmware.api [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833138, 'name': PowerOnVM_Task, 'duration_secs': 0.769408} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.411904] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.412218] env[68638]: INFO nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Took 8.07 seconds to spawn the instance on the hypervisor. [ 629.414292] env[68638]: DEBUG nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.414292] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52efe42-0ed6-419f-8b29-954958f4d1f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.466609] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.7247} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.466609] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 53571ad6-1fdb-4651-8b4d-24f35ffc815a/53571ad6-1fdb-4651-8b4d-24f35ffc815a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.466609] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.466609] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86667140-f007-4330-8b56-47ae87ec4c0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.471983] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 629.471983] env[68638]: value = "task-2833140" [ 629.471983] env[68638]: _type = "Task" [ 629.471983] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.482528] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.488724] env[68638]: DEBUG nova.network.neutron [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.597150] env[68638]: DEBUG nova.scheduler.client.report [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.679968] env[68638]: DEBUG oslo_vmware.api [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833136, 'name': PowerOnVM_Task, 'duration_secs': 1.440741} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.679968] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.679968] env[68638]: INFO nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Took 11.16 seconds to spawn the instance on the hypervisor. 
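The "Waiting for the task ... progress is N% ... completed successfully" cycles above (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task) come from oslo.vmware's task polling. Below is a minimal, illustrative sketch of that poll loop only; `get_task_info` is a hypothetical stand-in for the vSphere property read that oslo.vmware performs internally, and this is not the library's actual code path.

```python
# Illustrative sketch of a VMware-style task poll loop, mirroring the
# "progress is N%" / "completed successfully" log lines above.
# get_task_info() is a hypothetical callable returning dict-like task state;
# the real oslo.vmware implementation reads task.info via the vSphere API.
import time


class TaskFailed(Exception):
    """Raised when the hypervisor reports the task as errored."""


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a task reference until it finishes, logging progress as it goes."""
    while True:
        info = get_task_info(task_ref)      # assumed: {'state': ..., 'progress': ...}
        state = info["state"]               # 'queued' | 'running' | 'success' | 'error'
        if state in ("queued", "running"):
            print(f"Task {task_ref}: progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)
            continue
        if state == "success":
            print(f"Task {task_ref} completed successfully "
                  f"(duration_secs={info.get('duration_secs')}).")
            return info
        raise TaskFailed(info.get("error", "unknown error"))
```

In the log, each iteration of this kind of loop produces one `_poll_task` DEBUG line, and the final `duration_secs` value is reported once the task reaches its terminal state.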
[ 629.679968] env[68638]: DEBUG nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.683722] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577c5d71-e72d-4e8f-af43-5a00364547fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.868239] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833137, 'name': CloneVM_Task, 'duration_secs': 1.351758} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.868544] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Created linked-clone VM from snapshot [ 629.869494] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c19c67-09c2-48c7-9ac3-fe373554a795 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.877686] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Uploading image 0a659ca3-2412-4223-9a1b-1adc578f94a6 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 629.910967] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 629.910967] env[68638]: value = "vm-569795" [ 629.910967] env[68638]: _type = "VirtualMachine" [ 629.910967] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 629.911130] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aa7c9691-f3f6-401f-bc51-bfed0235d7f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.922813] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lease: (returnval){ [ 629.922813] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520f95ed-45d2-c6f5-b84d-fb1771188530" [ 629.922813] env[68638]: _type = "HttpNfcLease" [ 629.922813] env[68638]: } obtained for exporting VM: (result){ [ 629.922813] env[68638]: value = "vm-569795" [ 629.922813] env[68638]: _type = "VirtualMachine" [ 629.922813] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 629.923150] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the lease: (returnval){ [ 629.923150] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520f95ed-45d2-c6f5-b84d-fb1771188530" [ 629.923150] env[68638]: _type = "HttpNfcLease" [ 629.923150] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 629.937415] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 629.937415] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520f95ed-45d2-c6f5-b84d-fb1771188530" [ 629.937415] env[68638]: _type = "HttpNfcLease" [ 629.937415] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 629.937900] env[68638]: INFO nova.compute.manager [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Took 27.06 seconds to build instance. [ 629.981447] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088288} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.982370] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 629.983218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43f4198-b70b-4df4-94eb-d5c6a4dd4e13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.006275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Releasing lock "refresh_cache-c71693e9-aeaa-4f12-b5cf-a179e558505d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.006275] env[68638]: DEBUG nova.compute.manager [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 630.006275] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.017161] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 53571ad6-1fdb-4651-8b4d-24f35ffc815a/53571ad6-1fdb-4651-8b4d-24f35ffc815a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.018180] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24dc268-7d68-4a7f-851d-a468cd07eab8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.021694] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7663d146-cd74-455c-8068-2e22542da6d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.040682] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Successfully updated port: aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 630.043884] env[68638]: DEBUG nova.compute.manager [req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received event network-vif-plugged-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 630.044241] env[68638]: DEBUG oslo_concurrency.lockutils [req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] Acquiring lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.046117] env[68638]: DEBUG oslo_concurrency.lockutils [req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.046117] env[68638]: DEBUG oslo_concurrency.lockutils [req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.046117] env[68638]: DEBUG nova.compute.manager 
[req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] No waiting events found dispatching network-vif-plugged-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 630.046117] env[68638]: WARNING nova.compute.manager [req-78351dbd-0576-4b3b-852e-2358fcf8cca3 req-51291ec0-2c3a-4569-931d-843afe6ea4b9 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received unexpected event network-vif-plugged-aaad37b0-74f3-46c9-91b1-9f850314fcec for instance with vm_state building and task_state spawning. [ 630.050819] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.052173] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-607b4bbb-b841-478f-9562-34079b099902 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.053860] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 630.053860] env[68638]: value = "task-2833142" [ 630.053860] env[68638]: _type = "Task" [ 630.053860] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.059891] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 630.059891] env[68638]: value = "task-2833143" [ 630.059891] env[68638]: _type = "Task" [ 630.059891] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.067350] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833142, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.071352] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.107234] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.822s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.107234] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 630.110034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.791s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.113322] env[68638]: INFO nova.compute.claims [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.199299] env[68638]: INFO nova.compute.manager [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Took 29.70 seconds to build instance. [ 630.273703] env[68638]: DEBUG nova.compute.manager [None req-1dbefaef-478d-47e5-bc3d-f3976f754cd6 tempest-ServerDiagnosticsTest-1573295464 tempest-ServerDiagnosticsTest-1573295464-project-admin] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 630.275261] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49cfeb5-58c4-4e8c-a069-fcff33faf146 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.283553] env[68638]: INFO nova.compute.manager [None req-1dbefaef-478d-47e5-bc3d-f3976f754cd6 tempest-ServerDiagnosticsTest-1573295464 tempest-ServerDiagnosticsTest-1573295464-project-admin] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Retrieving diagnostics [ 630.284617] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1f4f65-8a96-4c9c-945a-515553616077 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.438744] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 630.438744] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520f95ed-45d2-c6f5-b84d-fb1771188530" [ 630.438744] env[68638]: _type = "HttpNfcLease" [ 630.438744] env[68638]: } is ready. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 630.439641] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 630.439641] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520f95ed-45d2-c6f5-b84d-fb1771188530" [ 630.439641] env[68638]: _type = "HttpNfcLease" [ 630.439641] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 630.440593] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac8e602-b402-499e-b11e-b6c5a9f97a6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.444134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4deff31e-3787-4c4c-802f-028f79679a65 tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.112s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.451700] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 630.451899] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk for reading. 
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 630.547631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.547631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquired lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.547631] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.555926] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1bb44694-6e31-4c5f-8aff-84c8140ed9c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.571783] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833142, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.576429] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833143, 'name': PowerOffVM_Task, 'duration_secs': 0.28768} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.579032] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 630.579326] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 630.581071] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff3de51b-e974-4995-aa7b-3cdd04ba4ebd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.610154] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.610311] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.610667] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Deleting the datastore file [datastore1] c71693e9-aeaa-4f12-b5cf-a179e558505d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.611595] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c66aa201-df98-458a-b196-d313f39826d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.618834] env[68638]: DEBUG nova.compute.utils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 630.623424] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 630.623424] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 630.632085] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for the task: (returnval){ [ 630.632085] env[68638]: value = "task-2833145" [ 630.632085] env[68638]: _type = "Task" [ 630.632085] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.643957] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833145, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.694734] env[68638]: DEBUG nova.policy [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7e9f64ef5ef4f2c9d8100ed55e7cbc8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '373459ee626847e9886e5ff353729280', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.702533] env[68638]: DEBUG oslo_concurrency.lockutils [None req-43d57c48-f27f-4e92-bd17-7b4546afefec tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.615s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.947795] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.074153] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833142, 'name': ReconfigVM_Task, 'duration_secs': 0.647991} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.074640] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 53571ad6-1fdb-4651-8b4d-24f35ffc815a/53571ad6-1fdb-4651-8b4d-24f35ffc815a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.075531] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e406488c-0fc1-4160-b930-be855e8a0d62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.082988] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 631.082988] env[68638]: value = "task-2833146" [ 631.082988] env[68638]: _type = "Task" [ 631.082988] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.098922] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833146, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.100279] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.129651] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 631.135287] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.136369] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.136369] env[68638]: DEBUG nova.compute.manager [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Going to confirm migration 1 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 631.157058] env[68638]: DEBUG oslo_vmware.api [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Task: {'id': task-2833145, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131048} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.157404] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 631.158480] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 631.161920] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.161920] env[68638]: INFO nova.compute.manager [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 631.162265] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.167590] env[68638]: DEBUG nova.compute.manager [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 631.168448] env[68638]: DEBUG nova.network.neutron [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.211436] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.269668] env[68638]: DEBUG nova.network.neutron [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.387240] env[68638]: DEBUG nova.network.neutron [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [{"id": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "address": "fa:16:3e:59:83:6f", "network": {"id": "ca367ad5-0507-4217-820a-b92e63df0cce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1321393658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d94444f92bb741739e4b3f8dfb3244b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaad37b0-74", "ovs_interfaceid": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.477648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.599772] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833146, 'name': Rename_Task, 'duration_secs': 0.212102} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.600351] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.600915] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdcd01fd-8ca7-4b92-9a84-9d7e7c0c818d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.603477] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Successfully created port: 67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.611600] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 631.611600] env[68638]: value = "task-2833147" [ 631.611600] env[68638]: _type = "Task" [ 631.611600] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.620749] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833147, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.733021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.733472] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.733472] env[68638]: DEBUG nova.network.neutron [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 631.733472] env[68638]: DEBUG nova.objects.instance [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lazy-loading 'info_cache' on Instance uuid 7617a7b1-3b21-4d38-b090-1d35bc74637b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 631.741012] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.773104] env[68638]: DEBUG nova.network.neutron [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.834864] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4029925-a27f-4ac9-97ef-78b8c5fc122b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.842904] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fc8c8d-3419-4b9d-acde-f8fed1accd04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.878296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4063e493-1be4-4d61-92c4-5f9bc5368ec5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.886248] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2351bd-1553-46f2-addb-a382d80d1bd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.890758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] 
Releasing lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.891102] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Instance network_info: |[{"id": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "address": "fa:16:3e:59:83:6f", "network": {"id": "ca367ad5-0507-4217-820a-b92e63df0cce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1321393658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d94444f92bb741739e4b3f8dfb3244b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaad37b0-74", "ovs_interfaceid": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.891509] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:83:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37fb1918-d178-4e12-93e6-316381e78be4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aaad37b0-74f3-46c9-91b1-9f850314fcec', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.902872] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Creating folder: Project (d94444f92bb741739e4b3f8dfb3244b2). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.903694] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e5dd524-44a7-4e87-be63-c15796306c19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.913527] env[68638]: DEBUG nova.compute.provider_tree [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.927552] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Created folder: Project (d94444f92bb741739e4b3f8dfb3244b2) in parent group-v569734. [ 631.927754] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Creating folder: Instances. Parent ref: group-v569796. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.928069] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98384e09-90f2-4689-8ff5-c0d46a69bbc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.938240] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Created folder: Instances in parent group-v569796. [ 631.938485] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.939161] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.939161] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ca51606-7df2-4af5-a966-043e507e7683 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.959893] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "421c377f-0b7a-457d-b5dd-50281c65122a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.960872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.960872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.960872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.961101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.963508] env[68638]: INFO nova.compute.manager [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Terminating instance [ 631.967557] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.967557] env[68638]: value = "task-2833150" [ 631.967557] env[68638]: _type = "Task" [ 631.967557] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.984676] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833150, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.126503] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833147, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.140293] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 632.176483] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 632.176749] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.176913] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 632.177129] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.177294] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 632.177454] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 
tempest-ListServerFiltersTestJSON-225846415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 632.177677] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 632.177838] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 632.179922] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 632.180186] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 632.180376] env[68638]: DEBUG nova.virt.hardware [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 632.182031] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecd4732-250c-4e1f-a75b-eaf2c5f7f25d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.189681] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493d7894-a15b-4bc7-950e-6214bd0ee28f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.275763] env[68638]: INFO nova.compute.manager [-] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Took 1.11 seconds to deallocate network for instance. [ 632.362728] env[68638]: DEBUG nova.compute.manager [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 632.362930] env[68638]: DEBUG nova.compute.manager [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing instance network info cache due to event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 632.363410] env[68638]: DEBUG oslo_concurrency.lockutils [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] Acquiring lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.363410] env[68638]: DEBUG oslo_concurrency.lockutils [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] Acquired lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.363549] env[68638]: DEBUG nova.network.neutron [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 632.421314] env[68638]: DEBUG nova.scheduler.client.report [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.468946] env[68638]: DEBUG nova.compute.manager [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.469205] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.470648] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e63f3b6-2d99-41a1-b1f2-c6ecd992683c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.489934] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833150, 'name': CreateVM_Task, 'duration_secs': 0.368729} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.495039] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.495039] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.495039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.495039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.495039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.495330] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-295a7043-3d54-4815-a990-4e5de6ff7ccd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.496149] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7680922-e9d7-4d21-9363-7e0a7886adc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.501521] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 632.501521] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eef98b-ad93-9a65-b537-7e1f51172c92" [ 632.501521] env[68638]: _type = "Task" [ 632.501521] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.503229] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 632.503229] env[68638]: value = "task-2833151" [ 632.503229] env[68638]: _type = "Task" [ 632.503229] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.517144] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eef98b-ad93-9a65-b537-7e1f51172c92, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.520454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.520721] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 632.520957] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.521142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.521360] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 632.521796] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833151, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.522158] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02786eb3-c904-4d4e-ba2b-744c246eb6c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.530648] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 632.530867] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 632.531697] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f363fa24-683b-46ab-a46c-da0ecae85898 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.537373] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 632.537373] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523de8a5-e218-b990-b544-6e6b67883f4e" [ 632.537373] env[68638]: _type = "Task" [ 632.537373] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.546569] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523de8a5-e218-b990-b544-6e6b67883f4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.623949] env[68638]: DEBUG oslo_vmware.api [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833147, 'name': PowerOnVM_Task, 'duration_secs': 0.69685} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.624949] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.625264] env[68638]: INFO nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 632.625453] env[68638]: DEBUG nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.626392] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f658b96d-fc9b-47da-8516-197db2d1071a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.792513] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.926370] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.816s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.927208] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.932270] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.365s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.932981] env[68638]: DEBUG nova.objects.instance [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lazy-loading 'resources' on Instance uuid 54af9c38-c8b6-4ef9-be63-de545dcc0da5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 633.021881] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833151, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.021881] env[68638]: DEBUG nova.compute.manager [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Received event network-changed {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 633.022360] env[68638]: DEBUG nova.compute.manager [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Refreshing instance network info cache due to event network-changed. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 633.022515] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] Acquiring lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.022746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] Acquired lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.023000] env[68638]: DEBUG nova.network.neutron [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.054159] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523de8a5-e218-b990-b544-6e6b67883f4e, 'name': SearchDatastore_Task, 'duration_secs': 0.008193} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.060648] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-574ad490-4b4d-4e27-81f1-4cbfe081b2ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.073187] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 633.073187] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523ac009-5caa-696e-1fe9-66d50d3d4d2a" [ 633.073187] env[68638]: _type = "Task" [ 633.073187] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.082777] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523ac009-5caa-696e-1fe9-66d50d3d4d2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.148888] env[68638]: INFO nova.compute.manager [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Took 29.94 seconds to build instance. [ 633.154643] env[68638]: DEBUG nova.network.neutron [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updated VIF entry in instance network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 633.155015] env[68638]: DEBUG nova.network.neutron [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [{"id": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "address": "fa:16:3e:59:83:6f", "network": {"id": "ca367ad5-0507-4217-820a-b92e63df0cce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1321393658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d94444f92bb741739e4b3f8dfb3244b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaad37b0-74", "ovs_interfaceid": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.209940] env[68638]: DEBUG nova.network.neutron [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [{"id": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "address": "fa:16:3e:17:75:81", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ce015b-df", "ovs_interfaceid": "41ce015b-dfb7-4031-a11b-8dfd0e29bb62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.440784] env[68638]: DEBUG nova.compute.utils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.445450] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.445450] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.520784] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833151, 'name': PowerOffVM_Task, 'duration_secs': 0.611672} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.521077] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.521251] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.521508] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f4bcfb6-0d54-494a-b4c7-195c2c1f0c85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.589634] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523ac009-5caa-696e-1fe9-66d50d3d4d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.01049} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.589882] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.590097] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.590322] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Deleting the datastore file [datastore1] 421c377f-0b7a-457d-b5dd-50281c65122a {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.593103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.593374] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 
32efc578-2cf9-4b61-bbaa-aa7031a04e33/32efc578-2cf9-4b61-bbaa-aa7031a04e33.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 633.593643] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae5c4ff0-1da6-4980-a2c5-bc0b7e213d82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.598103] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ded5196b-db43-4f05-a8b5-d714d46234d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.606985] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for the task: (returnval){ [ 633.606985] env[68638]: value = "task-2833153" [ 633.606985] env[68638]: _type = "Task" [ 633.606985] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.612736] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 633.612736] env[68638]: value = "task-2833154" [ 633.612736] env[68638]: _type = "Task" [ 633.612736] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.625856] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Successfully updated port: 67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.640656] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833153, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.644888] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833154, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.650351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f5286ef7-a332-4113-bff9-8db680db1d60 tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.862s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.659803] env[68638]: DEBUG oslo_concurrency.lockutils [req-608071e3-9493-42e7-9af3-d7250bbad48f req-9f5b28f9-8990-446a-bd2e-172ea57aeaee service nova] Releasing lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.713219] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-7617a7b1-3b21-4d38-b090-1d35bc74637b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.713473] env[68638]: DEBUG nova.objects.instance [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lazy-loading 'migration_context' on Instance uuid 7617a7b1-3b21-4d38-b090-1d35bc74637b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 633.808209] env[68638]: DEBUG nova.policy [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1f3c7982b2c4f7595c53767e752512d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f81cb755f54484a45c7732db68c4a7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.840028] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.840340] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.840556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock 
"53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.840735] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.840974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.847837] env[68638]: INFO nova.compute.manager [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Terminating instance [ 633.904226] env[68638]: DEBUG nova.network.neutron [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Updating instance_info_cache with network_info: [{"id": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "address": "fa:16:3e:6a:4b:d0", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4d5833f-aa", "ovs_interfaceid": "a4d5833f-aab1-4c4d-9651-ab0440b5d6dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.947029] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 634.131993] env[68638]: DEBUG oslo_vmware.api [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Task: {'id': task-2833153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151595} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.135348] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.135786] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.136018] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.136255] env[68638]: INFO nova.compute.manager [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Took 1.67 seconds to destroy the instance on the hypervisor. [ 634.136526] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.137120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.137295] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.137544] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.142444] env[68638]: DEBUG nova.compute.manager [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.142714] env[68638]: DEBUG nova.network.neutron [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.144668] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513632} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.145234] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 32efc578-2cf9-4b61-bbaa-aa7031a04e33/32efc578-2cf9-4b61-bbaa-aa7031a04e33.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 634.145495] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 634.146744] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edc039a8-2ac5-4495-bcd0-6c55f4e26d48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.152516] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 634.152516] env[68638]: value = "task-2833155" [ 634.152516] env[68638]: _type = "Task" [ 634.152516] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.156845] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.166985] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833155, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.202122] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35a8c62-d62c-4994-abcc-cea26175a603 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.214576] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb78a59d-2b4d-438a-9709-91fa6261b530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.219278] env[68638]: DEBUG nova.objects.base [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Object Instance<7617a7b1-3b21-4d38-b090-1d35bc74637b> lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 634.220592] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe8f2f8-15f9-4dc8-84e3-ff159488ae74 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.269843] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-959da969-c792-4c99-9908-edbbc0b8c9a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.273896] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52be7f1f-a603-4611-a116-1ecd4744b047 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.280990] env[68638]: DEBUG oslo_vmware.api [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 634.280990] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5239293e-73b5-91db-fa6b-509db4b802d4" [ 634.280990] env[68638]: _type = "Task" [ 634.280990] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.290292] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73c9f59-62b3-4256-9241-14abcfcb3fe2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.302396] env[68638]: DEBUG oslo_vmware.api [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5239293e-73b5-91db-fa6b-509db4b802d4, 'name': SearchDatastore_Task, 'duration_secs': 0.007277} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.313371] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.314437] env[68638]: DEBUG nova.compute.provider_tree [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.368437] env[68638]: DEBUG nova.compute.manager [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 634.368628] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.370282] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b49501-c663-4faf-8f35-78a74a0222a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.380114] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.380381] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82ff9d2a-66c1-4a78-9373-8084c4278ae3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.387611] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 634.387611] env[68638]: value = "task-2833156" [ 634.387611] env[68638]: _type = "Task" [ 634.387611] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.396445] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833156, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.409036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4b9caaf-b023-43b9-83be-0dcd3908ac3a tempest-ServerExternalEventsTest-539306160 tempest-ServerExternalEventsTest-539306160-project] Releasing lock "refresh_cache-53571ad6-1fdb-4651-8b4d-24f35ffc815a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.571637] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Successfully created port: 9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.668223] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069484} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.670689] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 634.672077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196da61b-877f-4f15-ad36-c25e13af0de2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.710935] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 32efc578-2cf9-4b61-bbaa-aa7031a04e33/32efc578-2cf9-4b61-bbaa-aa7031a04e33.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 634.712103] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.716667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.716949] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-996f8654-e664-4509-b48e-128e49152e10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.739973] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 634.739973] env[68638]: value = "task-2833157" [ 634.739973] env[68638]: _type = "Task" [ 634.739973] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.747448] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833157, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.773453] env[68638]: DEBUG nova.compute.manager [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Received event network-vif-plugged-67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 634.773880] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Acquiring lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.774251] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.774529] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.774946] env[68638]: DEBUG nova.compute.manager [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] No waiting events found dispatching network-vif-plugged-67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 634.775292] env[68638]: WARNING nova.compute.manager [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Received unexpected event network-vif-plugged-67d928ea-035b-4725-a33a-b0a2a24e0af4 for instance with vm_state building and task_state spawning. [ 634.775966] env[68638]: DEBUG nova.compute.manager [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Received event network-changed-67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 634.776275] env[68638]: DEBUG nova.compute.manager [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Refreshing instance network info cache due to event network-changed-67d928ea-035b-4725-a33a-b0a2a24e0af4. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 634.776563] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Acquiring lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.817970] env[68638]: DEBUG nova.scheduler.client.report [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.900365] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833156, 'name': PowerOffVM_Task, 'duration_secs': 0.310073} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.900565] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.900825] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.901092] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4760e0c0-688f-40f6-b724-1eec91f407de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.938012] env[68638]: DEBUG nova.network.neutron [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Updating instance_info_cache with network_info: [{"id": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "address": "fa:16:3e:01:90:5a", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d928ea-03", "ovs_interfaceid": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.956016] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.963324] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.963549] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.963745] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Deleting the datastore file [datastore2] 53571ad6-1fdb-4651-8b4d-24f35ffc815a {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.964043] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57c5dd2f-0b77-40ab-95c1-17f8ec7923a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.970301] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for the task: (returnval){ [ 634.970301] env[68638]: value = "task-2833159" [ 634.970301] env[68638]: _type = "Task" [ 634.970301] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.982619] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833159, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.984949] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.985299] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.985401] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 634.985662] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.985809] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.985948] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.986166] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.986317] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 634.986479] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.986631] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.987167] env[68638]: DEBUG nova.virt.hardware [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.988058] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fe0858-34ed-4391-a1d5-341c61726368 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.995303] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589ebe01-71df-4ad5-aedd-220a314a843d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.023628] env[68638]: DEBUG nova.network.neutron [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.251250] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833157, 'name': ReconfigVM_Task, 'duration_secs': 0.28452} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.251250] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 32efc578-2cf9-4b61-bbaa-aa7031a04e33/32efc578-2cf9-4b61-bbaa-aa7031a04e33.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 635.255041] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cdfa8f84-d5b7-4af8-98a1-7745488dcc4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.261196] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 635.261196] env[68638]: value = "task-2833160" [ 635.261196] env[68638]: _type = "Task" [ 635.261196] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.273740] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833160, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.323465] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.391s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.326172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.737s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.327734] env[68638]: INFO nova.compute.claims [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.353427] env[68638]: INFO nova.scheduler.client.report [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Deleted allocations for instance 54af9c38-c8b6-4ef9-be63-de545dcc0da5 [ 635.439743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.440095] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Instance network_info: |[{"id": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "address": "fa:16:3e:01:90:5a", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap67d928ea-03", "ovs_interfaceid": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 635.440700] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Acquired lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.440971] env[68638]: DEBUG nova.network.neutron [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Refreshing network info cache for port 67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 635.443570] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:90:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67d928ea-035b-4725-a33a-b0a2a24e0af4', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.450405] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 635.451166] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 635.451402] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31063761-76f5-43a3-9a51-f52fae45f170 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.472551] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.472551] env[68638]: value = "task-2833161" [ 635.472551] env[68638]: _type = "Task" [ 635.472551] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.484692] env[68638]: DEBUG oslo_vmware.api [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Task: {'id': task-2833159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172486} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.488074] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.488192] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.488314] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.488484] env[68638]: INFO nova.compute.manager [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 635.488719] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 635.488888] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833161, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.489410] env[68638]: DEBUG nova.compute.manager [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 635.489665] env[68638]: DEBUG nova.network.neutron [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.529050] env[68638]: INFO nova.compute.manager [-] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Took 1.39 seconds to deallocate network for instance. [ 635.771773] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833160, 'name': Rename_Task, 'duration_secs': 0.141756} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.772112] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 635.776302] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83e60373-604a-4e01-83f8-a0312b021d7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.782380] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 635.782380] env[68638]: value = "task-2833162" [ 635.782380] env[68638]: _type = "Task" [ 635.782380] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.801808] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.861468] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0dd1d391-f012-4728-8bb2-2e377f1e08b4 tempest-ServerTagsTestJSON-1149026568 tempest-ServerTagsTestJSON-1149026568-project-member] Lock "54af9c38-c8b6-4ef9-be63-de545dcc0da5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.419s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.988091] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833161, 'name': CreateVM_Task, 'duration_secs': 0.312506} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.988466] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 635.989274] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.989582] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 635.990024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 635.990579] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fe0f1a-8c86-4617-af4f-70d6f5ade99c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.996575] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 635.996575] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529197d0-1c26-37ce-8e06-4f65df1abad6" [ 635.996575] env[68638]: _type = "Task" [ 635.996575] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.009953] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529197d0-1c26-37ce-8e06-4f65df1abad6, 'name': SearchDatastore_Task, 'duration_secs': 0.008853} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.010282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.010535] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 636.011094] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.011094] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.011094] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.011304] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-784a9de3-1048-45e6-8975-43c3325397ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.020872] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.021112] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 636.022066] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594235d0-8cb7-4ab5-9b46-c15e5f15b04f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.028506] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 636.028506] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d521f2-bd3b-b37f-a402-94a0e24550b6" [ 636.028506] env[68638]: _type = "Task" [ 636.028506] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.037152] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.043820] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d521f2-bd3b-b37f-a402-94a0e24550b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009644} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.044665] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-759038df-dd94-4e24-8921-89b81c1b109c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.050258] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 636.050258] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c93f-3c76-3380-7eeb-01b519622f88" [ 636.050258] env[68638]: _type = "Task" [ 636.050258] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.060728] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c93f-3c76-3380-7eeb-01b519622f88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.251253] env[68638]: DEBUG nova.network.neutron [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Updated VIF entry in instance network info cache for port 67d928ea-035b-4725-a33a-b0a2a24e0af4. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.251253] env[68638]: DEBUG nova.network.neutron [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Updating instance_info_cache with network_info: [{"id": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "address": "fa:16:3e:01:90:5a", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d928ea-03", "ovs_interfaceid": "67d928ea-035b-4725-a33a-b0a2a24e0af4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.295215] env[68638]: DEBUG oslo_vmware.api [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833162, 'name': PowerOnVM_Task, 'duration_secs': 0.483157} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.295656] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 636.296229] env[68638]: INFO nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Took 6.99 seconds to spawn the instance on the hypervisor. 
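The PowerOnVM_Task and SearchDatastore_Task entries above all follow the same oslo.vmware pattern: an asynchronous vCenter task is started through the session, then wait_for_task polls it until it finishes (the "_poll_task ... progress is N%" / "completed successfully" lines). A minimal sketch of that pattern, assuming an already-created VMwareAPISession and placeholder managed-object references and task names, not part of the captured log:

# Illustrative sketch only, not part of the captured log.
# "managed_object" and the task method name are placeholders; invoke_api
# and wait_for_task are the public oslo.vmware calls behind the
# wait_for_task/_poll_task entries above.
from oslo_vmware import api


def run_vcenter_task(session: api.VMwareAPISession, managed_object, method, **kwargs):
    """Start a vCenter task and block until it completes.

    wait_for_task polls the returned task object and logs its progress,
    which is what the "_poll_task ... progress is N%" lines record.
    """
    task_ref = session.invoke_api(session.vim, method, managed_object, **kwargs)
    return session.wait_for_task(task_ref)


# Example shape of a call (references are placeholders):
# task_info = run_vcenter_task(session, vm_ref, 'PowerOnVM_Task')
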
[ 636.296628] env[68638]: DEBUG nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.298620] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c049b51d-d72a-4711-8be5-97734eac5050 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.370837] env[68638]: DEBUG nova.network.neutron [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.561220] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c93f-3c76-3380-7eeb-01b519622f88, 'name': SearchDatastore_Task, 'duration_secs': 0.008731} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.564368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.564722] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248/8fe9ba7e-021c-4b0f-a9ba-df7a6b753248.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 636.565596] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22bcc5ad-21f2-442a-860c-b99f3fcf9002 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.572810] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 636.572810] env[68638]: value = "task-2833163" [ 636.572810] env[68638]: _type = "Task" [ 636.572810] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.591928] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833163, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.760857] env[68638]: DEBUG oslo_concurrency.lockutils [req-88ffa226-9fc3-4977-b051-a803c8364218 req-0cb984b0-d98e-497a-8242-4aab62620a59 service nova] Releasing lock "refresh_cache-8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.822347] env[68638]: INFO nova.compute.manager [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Took 32.10 seconds to build instance. [ 636.852884] env[68638]: DEBUG nova.compute.manager [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Received event network-vif-plugged-9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 636.852884] env[68638]: DEBUG oslo_concurrency.lockutils [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] Acquiring lock "be761cf1-0949-42c0-8a38-58af33113a03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.852884] env[68638]: DEBUG oslo_concurrency.lockutils [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] Lock "be761cf1-0949-42c0-8a38-58af33113a03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.852884] env[68638]: DEBUG oslo_concurrency.lockutils [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] Lock "be761cf1-0949-42c0-8a38-58af33113a03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.853170] env[68638]: DEBUG nova.compute.manager [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] No waiting events found dispatching network-vif-plugged-9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 636.853238] env[68638]: WARNING nova.compute.manager [req-39638f47-1c37-4a71-baad-2440298ec9af req-4da7fbcc-18b3-472b-bd8b-a96fdb1d3864 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Received unexpected event network-vif-plugged-9868aa77-d4cb-4432-9b96-1caa6f97fb36 for instance with vm_state building and task_state spawning. [ 636.875045] env[68638]: INFO nova.compute.manager [-] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Took 1.39 seconds to deallocate network for instance. 
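The lockutils entries interleaved above ('Acquiring lock ... by ...', 'Lock ... acquired ... :: waited', 'Lock ... "released" ... :: held', and the plain acquire/release pairs around the image-cache paths) are emitted by oslo.concurrency while named locks are held around critical sections. A minimal sketch of the two forms that produce these kinds of lines, with placeholder lock names and empty bodies, not part of the captured log:

# Illustrative sketch only, not part of the captured log.
# Lock names and the function body are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the named lock held; the decorator's wrapper logs lines of
    # the 'acquired by ... :: waited' / '"released" by ... :: held' form.
    pass


# The context-manager form, of the kind seen above around the
# devstack-image-cache_base entries ("Acquiring lock ... / Acquired lock ...
# / Releasing lock ..."):
with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>'):
    pass
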
[ 636.990099] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df441f89-905e-4dff-af35-e7e181e3fe72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.999487] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b465903-bce9-4094-bebf-319fd8d39985 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.046928] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f5d621-206d-487d-a1ad-86d9b581b716 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.058027] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c8fbc8-5863-46dd-a38e-818220b93a47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.071154] env[68638]: DEBUG nova.compute.provider_tree [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.083434] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833163, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.115166] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Successfully updated port: 9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 637.327851] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cfcaa5fe-0d94-4c62-8f83-df0e90d2d48a tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.285s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.386892] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.537025] env[68638]: DEBUG nova.compute.manager [req-51c21245-8774-4b28-a1a7-f5da8c997f41 req-30f28551-d002-417c-b9e2-ebd558b1b2c3 service nova] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Received event network-vif-deleted-1125034b-dba0-498d-89a1-db63e8fd03ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 637.537205] 
env[68638]: DEBUG nova.compute.manager [req-51c21245-8774-4b28-a1a7-f5da8c997f41 req-30f28551-d002-417c-b9e2-ebd558b1b2c3 service nova] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Received event network-vif-deleted-a4d5833f-aab1-4c4d-9651-ab0440b5d6dd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 637.579050] env[68638]: DEBUG nova.scheduler.client.report [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.594364] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833163, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.561522} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.594364] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248/8fe9ba7e-021c-4b0f-a9ba-df7a6b753248.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 637.594364] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 637.594682] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-162d4cc4-a63d-4689-8b86-fbf9bd676dd5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.603943] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 637.603943] env[68638]: value = "task-2833164" [ 637.603943] env[68638]: _type = "Task" [ 637.603943] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.611230] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.611405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.611660] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.619243] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833164, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.832771] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.087669] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.761s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.088357] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 638.092068] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.423s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.092273] env[68638]: DEBUG nova.objects.instance [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 638.121027] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833164, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102789} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.125825] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 638.126846] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2122a1fe-a791-44dc-a122-93b7517b4b96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.155938] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248/8fe9ba7e-021c-4b0f-a9ba-df7a6b753248.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 638.157570] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adf61bf2-c6bf-4b26-9df6-ef5a40d0ec42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.180741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.180989] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.185430] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 638.185430] env[68638]: value = "task-2833165" [ 638.185430] env[68638]: _type = "Task" [ 638.185430] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.186233] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 638.197645] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833165, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.360286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.391881] env[68638]: DEBUG nova.network.neutron [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Updating instance_info_cache with network_info: [{"id": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "address": "fa:16:3e:8b:96:54", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9868aa77-d4", "ovs_interfaceid": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.593918] env[68638]: DEBUG nova.compute.utils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 
tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.598975] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.599225] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.679063] env[68638]: DEBUG nova.policy [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec395966626843e3a6f7d3e34e054a06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9ffb656ebf844d4b71f49b35a594d4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.697955] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833165, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.894500] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.894837] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Instance network_info: |[{"id": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "address": "fa:16:3e:8b:96:54", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9868aa77-d4", "ovs_interfaceid": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.895318] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:96:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9868aa77-d4cb-4432-9b96-1caa6f97fb36', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.904999] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Creating folder: Project (82f81cb755f54484a45c7732db68c4a7). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.905385] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d6a6782-ef22-4742-8a8a-cd316a87512f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.925601] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Created folder: Project (82f81cb755f54484a45c7732db68c4a7) in parent group-v569734. [ 638.925873] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Creating folder: Instances. Parent ref: group-v569800. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.929261] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69fce0a9-fca4-4c40-ae93-215f3c3e172f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.936954] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Created folder: Instances in parent group-v569800. [ 638.937260] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 638.937432] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.937643] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abb62b88-019b-457f-a773-b6e879c65777 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.958640] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.958640] env[68638]: value = "task-2833168" [ 638.958640] env[68638]: _type = "Task" [ 638.958640] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.966738] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833168, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.106220] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 639.113136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851a7b2a-555b-4b32-813c-982fb00d3579 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.113385] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.382s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.113617] env[68638]: DEBUG nova.objects.instance [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lazy-loading 'resources' on Instance uuid a5e993de-7aad-4b34-8946-563dc69a6f25 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 639.204022] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833165, 'name': ReconfigVM_Task, 'duration_secs': 0.52024} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.204345] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248/8fe9ba7e-021c-4b0f-a9ba-df7a6b753248.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 639.205074] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aded4d77-ed72-4f63-93a3-00d876231119 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.212189] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 639.212189] env[68638]: value = "task-2833169" [ 639.212189] env[68638]: _type = "Task" [ 639.212189] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.218998] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Successfully created port: 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.227215] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833169, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.469213] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833168, 'name': CreateVM_Task, 'duration_secs': 0.365621} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.469616] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.470131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.470334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.470618] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 639.470889] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cb0a960-c0ff-40a5-815d-da60df07d8e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.476191] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 639.476191] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a64412-7d18-432a-f0cc-cccdb87164f7" [ 639.476191] env[68638]: _type = "Task" [ 639.476191] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.485451] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a64412-7d18-432a-f0cc-cccdb87164f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.721540] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833169, 'name': Rename_Task, 'duration_secs': 0.219355} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.721883] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.722157] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcb37a89-ebe1-4760-9cb6-2882ebbf7b3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.734107] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 639.734107] env[68638]: value = "task-2833170" [ 639.734107] env[68638]: _type = "Task" [ 639.734107] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.742347] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833170, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.790275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.791045] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.870886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.871021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.927973] env[68638]: DEBUG nova.compute.manager [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Received event network-changed-9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 639.928200] env[68638]: DEBUG nova.compute.manager [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Refreshing instance network info cache due to event network-changed-9868aa77-d4cb-4432-9b96-1caa6f97fb36. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 639.928418] env[68638]: DEBUG oslo_concurrency.lockutils [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] Acquiring lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.928561] env[68638]: DEBUG oslo_concurrency.lockutils [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] Acquired lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.928716] env[68638]: DEBUG nova.network.neutron [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Refreshing network info cache for port 9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.987646] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a64412-7d18-432a-f0cc-cccdb87164f7, 'name': SearchDatastore_Task, 'duration_secs': 0.015837} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.987646] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.987875] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.988031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.988186] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.988362] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 639.988642] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b34c7469-07f0-480b-bb70-20f8ad229b9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.005163] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.005358] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 640.006150] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfaf9a21-aa30-4bde-bf8e-ebe1f12f9783 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.016357] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 640.016357] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b0e70f-c9a3-8ada-e83d-933417e2ef71" [ 640.016357] env[68638]: _type = "Task" [ 640.016357] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.026241] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b0e70f-c9a3-8ada-e83d-933417e2ef71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.126237] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 640.172685] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.172912] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.173119] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.173322] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.173468] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.173616] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.173826] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.174010] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.174280] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.174652] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.174652] env[68638]: DEBUG nova.virt.hardware [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.175561] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c69c10e-24e0-425a-af48-026289ea8700 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.192081] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe9c6f3-9a2e-4173-8da2-d8dac097fbcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.237455] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dde3d9e-5975-453b-a880-d5844d1c0fc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.243625] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833170, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.250716] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c95bcc-dc24-4d08-82f4-ec3120269058 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.282996] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6abbf1-f653-45c3-a65b-736eed62ddcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.290955] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e271974b-6896-4d0d-8d03-1ba1fa1179b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.304534] env[68638]: DEBUG nova.compute.provider_tree [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.531819] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b0e70f-c9a3-8ada-e83d-933417e2ef71, 'name': SearchDatastore_Task, 'duration_secs': 0.01862} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.532861] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-939472ab-18ab-4787-8c31-a1e9c86b8d47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.540567] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 640.540567] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b1f29d-2737-38b3-4eb6-85d4eb8e4c44" [ 640.540567] env[68638]: _type = "Task" [ 640.540567] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.549312] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b1f29d-2737-38b3-4eb6-85d4eb8e4c44, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.669762] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "06a1a44f-35ee-45d2-9503-23468150b72f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.670261] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.709409] env[68638]: DEBUG nova.network.neutron [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Updated VIF entry in instance network info cache for port 9868aa77-d4cb-4432-9b96-1caa6f97fb36. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.709409] env[68638]: DEBUG nova.network.neutron [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Updating instance_info_cache with network_info: [{"id": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "address": "fa:16:3e:8b:96:54", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9868aa77-d4", "ovs_interfaceid": "9868aa77-d4cb-4432-9b96-1caa6f97fb36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.739892] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 640.741212] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013c86b3-3033-4a65-a7c3-a19606d19d45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.747507] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 640.747697] env[68638]: ERROR oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk due to incomplete transfer. [ 640.750696] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f2cb768a-d767-4b24-b80a-3619735b70cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.752253] env[68638]: DEBUG oslo_vmware.api [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833170, 'name': PowerOnVM_Task, 'duration_secs': 0.841238} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.752536] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 640.752820] env[68638]: INFO nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Took 8.61 seconds to spawn the instance on the hypervisor. [ 640.752981] env[68638]: DEBUG nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.754097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cd9098-af57-4470-b027-1f6b3cd0914d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.758833] env[68638]: DEBUG oslo_vmware.rw_handles [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5221efaf-5e51-4e21-756d-1fbc178d8636/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 640.759049] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Uploaded image 0a659ca3-2412-4223-9a1b-1adc578f94a6 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 640.761336] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 640.762891] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e90e4313-2948-42da-91d9-b12eef8869d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.771052] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 640.771052] env[68638]: value = "task-2833171" [ 640.771052] env[68638]: _type = "Task" [ 640.771052] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.778688] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833171, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.807816] env[68638]: DEBUG nova.scheduler.client.report [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.051293] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b1f29d-2737-38b3-4eb6-85d4eb8e4c44, 'name': SearchDatastore_Task, 'duration_secs': 0.028371} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.051608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.051920] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] be761cf1-0949-42c0-8a38-58af33113a03/be761cf1-0949-42c0-8a38-58af33113a03.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.052261] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c37d6f52-f9cc-44d8-8283-5a563a994517 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.059380] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 641.059380] env[68638]: value = "task-2833172" [ 641.059380] env[68638]: _type = "Task" [ 641.059380] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.068518] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833172, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.104652] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Successfully updated port: 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.210375] env[68638]: DEBUG oslo_concurrency.lockutils [req-7c162ff5-3d2b-4a17-8f02-4896a64290d0 req-a95e9d6f-4ccc-4476-a52b-ce2229e08496 service nova] Releasing lock "refresh_cache-be761cf1-0949-42c0-8a38-58af33113a03" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.280362] env[68638]: INFO nova.compute.manager [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Took 30.46 seconds to build instance. [ 641.284802] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833171, 'name': Destroy_Task, 'duration_secs': 0.322396} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.285050] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Destroyed the VM [ 641.285287] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 641.285527] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1b64ea68-0060-44ca-b4b2-0a817e10192d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.292679] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 641.292679] env[68638]: value = "task-2833173" [ 641.292679] env[68638]: _type = "Task" [ 641.292679] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.302177] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833173, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.312407] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.199s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.314754] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.656s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.316494] env[68638]: INFO nova.compute.claims [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.340968] env[68638]: INFO nova.scheduler.client.report [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Deleted allocations for instance a5e993de-7aad-4b34-8946-563dc69a6f25 [ 641.570106] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833172, 
'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.607087] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.607289] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.607435] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.782203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f15774a5-4c8a-46ab-888b-69d261c91d00 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.268s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.804304] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833173, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.851759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fefbe81f-c949-4623-b83b-513e24e3e66a tempest-ImagesNegativeTestJSON-720908012 tempest-ImagesNegativeTestJSON-720908012-project-member] Lock "a5e993de-7aad-4b34-8946-563dc69a6f25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.417s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.071994] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833172, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797668} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.071994] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] be761cf1-0949-42c0-8a38-58af33113a03/be761cf1-0949-42c0-8a38-58af33113a03.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.072274] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.072399] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-414b9684-90ea-489e-9e72-385e54773146 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.079070] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 642.079070] env[68638]: value = "task-2833174" [ 642.079070] env[68638]: _type = "Task" [ 642.079070] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.087296] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.149163] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.227953] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 642.228154] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing instance network info cache due to event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 642.228367] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Acquiring lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.228509] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Acquired lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.228670] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 642.285188] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 642.304440] env[68638]: DEBUG oslo_vmware.api [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833173, 'name': RemoveSnapshot_Task, 'duration_secs': 0.708237} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.304723] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 642.305029] env[68638]: INFO nova.compute.manager [None req-66a2abfa-b8ce-4188-b612-3f841de47e96 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 16.56 seconds to snapshot the instance on the hypervisor. 
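[editor's note] The recurring "Waiting for the task: (returnval){...}", "Task: {...} progress is N%" and "completed successfully" entries above (PowerOnVM_Task, Destroy_Task, RemoveSnapshot_Task, CopyVirtualDisk_Task) are produced by oslo.vmware's task-polling loop. The snippet below is a minimal illustrative sketch of that pattern, not code taken from Nova; the vCenter host, credentials and the vm_ref argument are placeholder assumptions.

    from oslo_vmware import api

    # Placeholder connection details -- assumptions for illustration only.
    session = api.VMwareAPISession(
        'vc.example.test',              # hypothetical vCenter endpoint
        'administrator@vsphere.local',  # hypothetical user
        'secret',                       # hypothetical password
        api_retry_count=10,
        task_poll_interval=0.5)         # cadence behind the "progress is N%" lines

    def power_on(session, vm_ref):
        """Start a VM and block until vCenter reports the task finished."""
        # Invoke the asynchronous vSphere method; it returns a task reference.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task, emitting the periodic progress DEBUG
        # messages seen in the log, and returns the task info on success.
        return session.wait_for_task(task_ref)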
[ 642.338338] env[68638]: DEBUG nova.network.neutron [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [{"id": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "address": "fa:16:3e:e5:ba:33", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0a18ba-0a", "ovs_interfaceid": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.541190] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.541733] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 642.591200] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064423} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.595173] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 642.596866] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f990df-90ae-49ba-b38b-6d97fca67a57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.624881] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] be761cf1-0949-42c0-8a38-58af33113a03/be761cf1-0949-42c0-8a38-58af33113a03.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 642.628084] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a467517-f1ab-4604-ac49-262a5e391b82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.648591] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 642.648591] env[68638]: value = "task-2833175" [ 642.648591] env[68638]: _type = "Task" [ 642.648591] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.659742] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833175, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.819442] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.842864] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.843216] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance network_info: |[{"id": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "address": "fa:16:3e:e5:ba:33", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0a18ba-0a", "ovs_interfaceid": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 642.843624] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:ba:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '86a35d07-53d3-46b3-92cb-ae34236c0f41', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 642.854019] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 642.854761] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 642.857322] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e8871f4-bb8b-4a97-bdc9-e1b5a4ef9ff7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.881921] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 642.881921] env[68638]: value = "task-2833176" [ 642.881921] env[68638]: _type = "Task" [ 642.881921] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.891503] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833176, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.915165] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbb52ba-8fdd-4cde-b90e-8f2264f08f70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.923846] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d913dfb4-13c6-403c-a34d-8f8433b22591 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.958876] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7863adb2-b8f3-47b8-a007-6ba0e05ffac4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.966639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc6c965-bb30-4305-b9ed-8c9c8c8c37e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.980979] env[68638]: DEBUG nova.compute.provider_tree [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.050692] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.051206] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.051412] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.051609] env[68638]: DEBUG 
oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.052772] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.052772] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.052772] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 643.052772] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 643.122165] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updated VIF entry in instance network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 643.123580] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [{"id": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "address": "fa:16:3e:59:83:6f", "network": {"id": "ca367ad5-0507-4217-820a-b92e63df0cce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1321393658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d94444f92bb741739e4b3f8dfb3244b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaad37b0-74", "ovs_interfaceid": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.159027] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833175, 
'name': ReconfigVM_Task, 'duration_secs': 0.30641} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.159336] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Reconfigured VM instance instance-00000016 to attach disk [datastore2] be761cf1-0949-42c0-8a38-58af33113a03/be761cf1-0949-42c0-8a38-58af33113a03.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.159983] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a52fb1f-5196-4ec8-b40a-41f98e6fcdb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.166823] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 643.166823] env[68638]: value = "task-2833177" [ 643.166823] env[68638]: _type = "Task" [ 643.166823] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.174726] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833177, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.391930] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833176, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.484265] env[68638]: DEBUG nova.scheduler.client.report [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.557033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.626784] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Releasing lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.627217] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received event network-vif-plugged-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 643.627272] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.627438] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.627596] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.627770] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] No waiting events found dispatching network-vif-plugged-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 643.627929] env[68638]: WARNING nova.compute.manager 
[req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received unexpected event network-vif-plugged-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f for instance with vm_state building and task_state spawning. [ 643.628099] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 643.628253] env[68638]: DEBUG nova.compute.manager [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing instance network info cache due to event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 643.628437] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Acquiring lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.628570] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Acquired lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.628726] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing network info cache for port 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.677891] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833177, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.892869] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833176, 'name': CreateVM_Task, 'duration_secs': 0.683379} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.893070] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.893798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.893971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.894338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 643.894613] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4b14396-1f62-4ad2-b9cf-4a6ff45281f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.899415] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 643.899415] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a74908-d78a-a457-a31b-238bd2845c16" [ 643.899415] env[68638]: _type = "Task" [ 643.899415] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.907862] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a74908-d78a-a457-a31b-238bd2845c16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.990229] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.990701] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 643.993617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.758s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.995219] env[68638]: INFO nova.compute.claims [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.179999] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833177, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.410137] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a74908-d78a-a457-a31b-238bd2845c16, 'name': SearchDatastore_Task, 'duration_secs': 0.009364} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.410137] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.410137] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.410416] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.410416] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.410545] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.412023] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aa82ed3-875f-4266-9bef-595a04406e50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.415216] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updated VIF entry in instance network info cache for port 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 644.415526] env[68638]: DEBUG nova.network.neutron [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [{"id": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "address": "fa:16:3e:e5:ba:33", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0a18ba-0a", "ovs_interfaceid": "6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.424352] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.424538] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.425811] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e532061-fd20-4575-be24-a21be0d38bee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.431628] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 644.431628] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e10691-73d1-3df3-c7d5-9bd966421468" [ 644.431628] env[68638]: _type = "Task" [ 644.431628] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.439213] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e10691-73d1-3df3-c7d5-9bd966421468, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.488874] env[68638]: DEBUG nova.compute.manager [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 644.489012] env[68638]: DEBUG nova.compute.manager [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing instance network info cache due to event network-changed-aaad37b0-74f3-46c9-91b1-9f850314fcec. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 644.489272] env[68638]: DEBUG oslo_concurrency.lockutils [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] Acquiring lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.489422] env[68638]: DEBUG oslo_concurrency.lockutils [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] Acquired lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.489591] env[68638]: DEBUG nova.network.neutron [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Refreshing network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 644.500119] env[68638]: DEBUG nova.compute.utils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 644.507647] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 644.507791] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 644.576989] env[68638]: DEBUG nova.policy [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7e9f64ef5ef4f2c9d8100ed55e7cbc8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '373459ee626847e9886e5ff353729280', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 644.683066] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833177, 'name': Rename_Task, 'duration_secs': 1.144064} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.683361] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.683604] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bbd5dbc-458c-47c0-924e-77b54594d2ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.689967] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 644.689967] env[68638]: value = "task-2833178" [ 644.689967] env[68638]: _type = "Task" [ 644.689967] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.704657] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833178, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.917858] env[68638]: DEBUG oslo_concurrency.lockutils [req-20cb48b2-cbcc-43d8-8f37-4f169843d455 req-13b9a9be-2cfb-4b6f-ad57-8dd18725d9c1 service nova] Releasing lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.943067] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e10691-73d1-3df3-c7d5-9bd966421468, 'name': SearchDatastore_Task, 'duration_secs': 0.027688} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.944717] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1095cef-7437-4029-9389-c830a4be7fba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.951282] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 644.951282] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c2c6ea-619c-3091-16b5-9b85a9424dab" [ 644.951282] env[68638]: _type = "Task" [ 644.951282] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.963888] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c2c6ea-619c-3091-16b5-9b85a9424dab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.011027] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 645.206137] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833178, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.290323] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Successfully created port: 1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.293471] env[68638]: DEBUG nova.network.neutron [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updated VIF entry in instance network info cache for port aaad37b0-74f3-46c9-91b1-9f850314fcec. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 645.293471] env[68638]: DEBUG nova.network.neutron [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [{"id": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "address": "fa:16:3e:59:83:6f", "network": {"id": "ca367ad5-0507-4217-820a-b92e63df0cce", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1321393658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d94444f92bb741739e4b3f8dfb3244b2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaaad37b0-74", "ovs_interfaceid": "aaad37b0-74f3-46c9-91b1-9f850314fcec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.368519] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.368994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.369271] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d 
tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.369359] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.369504] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.375041] env[68638]: INFO nova.compute.manager [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Terminating instance [ 645.464805] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c2c6ea-619c-3091-16b5-9b85a9424dab, 'name': SearchDatastore_Task, 'duration_secs': 0.017889} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.465027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.465680] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 168c2937-f8ce-472f-b21f-e48eed909f43/168c2937-f8ce-472f-b21f-e48eed909f43.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.471465] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ea55d6b-2fea-4408-a6bc-76a6cfe11192 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.481252] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 645.481252] env[68638]: value = "task-2833179" [ 645.481252] env[68638]: _type = "Task" [ 645.481252] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.492417] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833179, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.701675] env[68638]: DEBUG oslo_vmware.api [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833178, 'name': PowerOnVM_Task, 'duration_secs': 0.746519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.704639] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.704901] env[68638]: INFO nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Took 10.75 seconds to spawn the instance on the hypervisor. 
[ 645.705135] env[68638]: DEBUG nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.706172] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cadb449-c43c-4df9-9386-99958864b3b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.722993] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c752a67-3699-4e0a-82b2-b6b64f3772b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.733222] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b483a597-07a5-45b9-8528-aff82e04bcc7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.767760] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4859d2d9-5bab-4e72-9efd-ccbf19a9af67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.776964] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53f7ceb-006c-4375-998c-9536e8e8561a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.795647] env[68638]: DEBUG nova.compute.provider_tree [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.799528] env[68638]: DEBUG oslo_concurrency.lockutils [req-681dd9d6-4452-4674-afa0-ec288adb65b6 req-c98c233a-bb02-4bc6-b892-5293465f593e service nova] Releasing lock "refresh_cache-32efc578-2cf9-4b61-bbaa-aa7031a04e33" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.881057] env[68638]: DEBUG nova.compute.manager [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 645.881482] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.882835] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c8bdd3-f72f-41e4-aa66-332ec23deb2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.893419] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 645.893830] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87edc148-b7f5-4d5e-9230-e54691ffdb17 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.901174] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 645.901174] env[68638]: value = "task-2833180" [ 645.901174] env[68638]: _type = "Task" [ 645.901174] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.911221] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.989915] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833179, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.032621] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 646.062535] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 646.062878] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.063181] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 646.063491] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.063679] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 646.063953] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 646.064192] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 646.064415] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 646.064728] 
env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 646.064875] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 646.065131] env[68638]: DEBUG nova.virt.hardware [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 646.066351] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7cf7cd-0d4f-45ba-90b7-bc79e15a68d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.077975] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de09c8e4-698c-44f4-bdcc-fa38e093852f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.228988] env[68638]: INFO nova.compute.manager [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Took 32.93 seconds to build instance. 
[ 646.300335] env[68638]: DEBUG nova.scheduler.client.report [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.383748] env[68638]: DEBUG nova.compute.manager [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.384703] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0824e9b3-6a23-4963-a147-65ebac5d9c58 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.416632] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833180, 'name': PowerOffVM_Task, 'duration_secs': 0.277706} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.417082] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 646.417294] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 646.418503] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b294291b-b9ca-4221-bd9f-891f1d990b79 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.489439] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833179, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56914} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.489784] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 168c2937-f8ce-472f-b21f-e48eed909f43/168c2937-f8ce-472f-b21f-e48eed909f43.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.493018] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.493018] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-914e0086-632e-410e-b10f-732fa1e0c42b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.499020] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 646.499020] env[68638]: value = "task-2833182" [ 646.499020] env[68638]: _type = "Task" [ 646.499020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.507526] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833182, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.519727] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 646.519985] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 646.520184] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Deleting the datastore file [datastore2] 32efc578-2cf9-4b61-bbaa-aa7031a04e33 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 646.520463] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6de996ad-45a3-4cf3-871e-7290c419878f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.527212] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for the task: (returnval){ [ 646.527212] env[68638]: value = "task-2833183" [ 646.527212] env[68638]: _type = "Task" [ 646.527212] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.536113] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833183, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.732086] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec5d343-0ece-41bb-8979-6012df72eb86 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.716s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.810031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.815s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.810031] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 646.815507] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.170s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.819939] env[68638]: INFO nova.compute.claims [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.899246] env[68638]: INFO nova.compute.manager [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] instance snapshotting [ 646.904472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdbfefe-67bb-4126-a591-3299f693696d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.927483] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5b3ca9-b2f7-43af-96cb-1f3fa0d062c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.007790] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070918} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.009061] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.009473] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3829f8-d9a9-4399-8a78-ca5665d11fe4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.038178] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 168c2937-f8ce-472f-b21f-e48eed909f43/168c2937-f8ce-472f-b21f-e48eed909f43.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.039334] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39341368-7ac9-430c-89c4-d24aacba1c45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.064609] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833183, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.066185] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 647.066185] env[68638]: value = "task-2833184" [ 647.066185] env[68638]: _type = "Task" [ 647.066185] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.074815] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833184, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.237316] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 647.327704] env[68638]: DEBUG nova.compute.utils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.329755] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 647.329944] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 647.387501] env[68638]: DEBUG nova.policy [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc7d5c18ef984e8fa66a83999076faa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66afa9ca42294c8e9e8d913b14e4a209', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 647.438835] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 647.439177] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-46333e93-8572-47f0-9973-047988dc158b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.448098] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 647.448098] env[68638]: value = "task-2833185" [ 647.448098] env[68638]: _type = "Task" [ 647.448098] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.463531] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833185, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.549408] env[68638]: DEBUG oslo_vmware.api [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Task: {'id': task-2833183, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.571083} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.549636] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 647.550234] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 647.550307] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.550583] env[68638]: INFO nova.compute.manager [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Took 1.67 seconds to destroy the instance on the hypervisor. [ 647.550698] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 647.551307] env[68638]: DEBUG nova.compute.manager [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.551307] env[68638]: DEBUG nova.network.neutron [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.581949] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833184, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.690907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "072be237-c51e-43d2-ad84-46122ef9f335" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.691168] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.751259] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Successfully updated port: 1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 647.766619] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.835744] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 647.850058] env[68638]: DEBUG nova.compute.manager [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Received event network-vif-plugged-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 647.850302] env[68638]: DEBUG oslo_concurrency.lockutils [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] Acquiring lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.850563] env[68638]: DEBUG oslo_concurrency.lockutils [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.850859] env[68638]: DEBUG oslo_concurrency.lockutils [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.852218] env[68638]: DEBUG nova.compute.manager [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] No waiting events found dispatching network-vif-plugged-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 647.852486] env[68638]: WARNING nova.compute.manager [req-3e0a6cf4-1096-440c-b80f-b2e8465ca840 req-60aa9fb5-fa72-4e76-a432-7f565b3418db service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Received unexpected event network-vif-plugged-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 for instance with vm_state building and task_state spawning. [ 647.966325] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833185, 'name': CreateSnapshot_Task, 'duration_secs': 0.462214} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.966737] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 647.967725] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39db348-7a9f-43a4-b2ac-3ab97ad50a43 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.037211] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Successfully created port: e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.083978] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833184, 'name': ReconfigVM_Task, 'duration_secs': 0.632388} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.084823] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 168c2937-f8ce-472f-b21f-e48eed909f43/168c2937-f8ce-472f-b21f-e48eed909f43.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.085641] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6f5d312-f047-44b9-a8be-07e3668e949a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.094471] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 648.094471] env[68638]: value = "task-2833186" [ 648.094471] env[68638]: _type = "Task" [ 648.094471] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.103949] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833186, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.112207] env[68638]: DEBUG nova.compute.manager [req-2bbe2ff6-a7b0-4771-beb8-2d1f729cf479 req-80c41f47-146a-48ef-b9c9-fd2fe747b0fc service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Received event network-vif-deleted-aaad37b0-74f3-46c9-91b1-9f850314fcec {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 648.113671] env[68638]: INFO nova.compute.manager [req-2bbe2ff6-a7b0-4771-beb8-2d1f729cf479 req-80c41f47-146a-48ef-b9c9-fd2fe747b0fc service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Neutron deleted interface aaad37b0-74f3-46c9-91b1-9f850314fcec; detaching it from the instance and deleting it from the info cache [ 648.113671] env[68638]: DEBUG nova.network.neutron [req-2bbe2ff6-a7b0-4771-beb8-2d1f729cf479 req-80c41f47-146a-48ef-b9c9-fd2fe747b0fc service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.260352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.260352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.260352] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.495978] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 648.496333] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-01518436-d359-40a3-9474-127503ed2fd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.500990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20043e6-c033-476e-9a80-6d0a7faab625 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.512555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caed136-5e1c-4023-9027-2a1a49e698b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.517086] env[68638]: DEBUG oslo_vmware.api [None 
req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 648.517086] env[68638]: value = "task-2833187" [ 648.517086] env[68638]: _type = "Task" [ 648.517086] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.547399] env[68638]: DEBUG nova.network.neutron [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.550182] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4316b87a-9bfc-4921-84fc-e7a0dd772e5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.556408] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833187, 'name': CloneVM_Task} progress is 23%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.562582] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4057d0-7e3a-4faa-b13a-e567a4c293c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.579564] env[68638]: DEBUG nova.compute.provider_tree [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.605771] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833186, 'name': Rename_Task, 'duration_secs': 0.162941} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.606551] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 648.608735] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1687c9f2-04be-47ad-b164-16d5aaa7b7e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.615694] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 648.615694] env[68638]: value = "task-2833188" [ 648.615694] env[68638]: _type = "Task" [ 648.615694] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.618121] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38557947-2085-4341-9d89-71116d97c7dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.633210] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833188, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.638738] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9067ddf7-c14d-4c00-9ba1-c64036210492 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.405796] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 649.407940] env[68638]: INFO nova.compute.manager [-] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Took 1.86 seconds to deallocate network for instance. [ 649.408749] env[68638]: DEBUG nova.scheduler.client.report [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.412469] env[68638]: DEBUG nova.compute.manager [req-2bbe2ff6-a7b0-4771-beb8-2d1f729cf479 req-80c41f47-146a-48ef-b9c9-fd2fe747b0fc service nova] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Detach interface failed, port_id=aaad37b0-74f3-46c9-91b1-9f850314fcec, reason: Instance 32efc578-2cf9-4b61-bbaa-aa7031a04e33 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 649.433644] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833187, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.433904] env[68638]: DEBUG oslo_vmware.api [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833188, 'name': PowerOnVM_Task, 'duration_secs': 0.618273} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.434797] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 649.434978] env[68638]: INFO nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 9.31 seconds to spawn the instance on the hypervisor. [ 649.435173] env[68638]: DEBUG nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 649.436259] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8156d54f-7eee-4546-87bf-0935cd4eb68b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.452038] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.452290] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.452453] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.452628] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.452780] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f 
tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.452917] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.453143] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.453308] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.453485] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.453646] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.453828] env[68638]: DEBUG nova.virt.hardware [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.454720] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0201b2-88cd-4f01-91aa-5344aec3a62d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.458293] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.466770] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e456ba9-7555-4c5d-bc79-58515b8d44b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.770218] env[68638]: DEBUG nova.network.neutron [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Updating instance_info_cache with network_info: [{"id": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "address": "fa:16:3e:56:1c:f4", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a99ea84-dd", "ovs_interfaceid": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.894653] env[68638]: DEBUG nova.compute.manager [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Received event network-changed-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 649.895326] env[68638]: DEBUG nova.compute.manager [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Refreshing instance network info cache due to event network-changed-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 649.895713] env[68638]: DEBUG oslo_concurrency.lockutils [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] Acquiring lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.913562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.098s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.915462] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 649.920339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.912s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.920339] env[68638]: DEBUG nova.objects.instance [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lazy-loading 'resources' on Instance uuid f767af17-f2bb-461d-9e7f-9c62b5504257 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.926772] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.927192] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833187, 'name': CloneVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.928139] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Successfully updated port: e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 649.958728] env[68638]: INFO nova.compute.manager [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 34.40 seconds to build instance. 
[ 650.197148] env[68638]: DEBUG nova.compute.manager [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Received event network-vif-plugged-e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 650.197148] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Acquiring lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.197148] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.197148] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.197553] env[68638]: DEBUG nova.compute.manager [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] No waiting events found dispatching network-vif-plugged-e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 650.197898] env[68638]: WARNING nova.compute.manager [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Received unexpected event network-vif-plugged-e76019da-d59a-45b4-a8e3-6fcded54f7b8 for instance with vm_state building and task_state spawning. [ 650.198179] env[68638]: DEBUG nova.compute.manager [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Received event network-changed-e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 650.198437] env[68638]: DEBUG nova.compute.manager [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Refreshing instance network info cache due to event network-changed-e76019da-d59a-45b4-a8e3-6fcded54f7b8. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 650.198720] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Acquiring lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.199146] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Acquired lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.199490] env[68638]: DEBUG nova.network.neutron [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Refreshing network info cache for port e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.275018] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.275018] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Instance network_info: |[{"id": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "address": "fa:16:3e:56:1c:f4", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a99ea84-dd", "ovs_interfaceid": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.275188] env[68638]: DEBUG oslo_concurrency.lockutils [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] Acquired lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.275188] env[68638]: DEBUG nova.network.neutron [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] [instance: 
f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Refreshing network info cache for port 1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.275188] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:1c:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1a99ea84-dd8a-40bf-bfb2-405fdd3a9919', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.287774] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.288947] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.289293] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-168fc0ca-1b46-4b92-88ca-e7e37ba14d3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.316546] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.316546] env[68638]: value = "task-2833189" [ 650.316546] env[68638]: _type = "Task" [ 650.316546] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.325754] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833189, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.418941] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833187, 'name': CloneVM_Task, 'duration_secs': 1.41293} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.419654] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Created linked-clone VM from snapshot [ 650.420534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306cc643-8892-4007-b80e-95cc5ab3d73e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.425373] env[68638]: DEBUG nova.compute.utils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.430353] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.430899] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.433431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.443033] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Uploading image 63fdcb23-2a1a-4cf7-a2aa-69fc8d769281 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 650.462049] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3044ace6-569e-4835-975d-a92832bce284 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.585s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.487605] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 650.487605] env[68638]: value = "vm-569805" [ 650.487605] env[68638]: _type = "VirtualMachine" [ 650.487605] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 650.487605] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-81b22e4a-4bc2-44ed-b93e-08868e8f4356 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.496374] env[68638]: DEBUG nova.policy [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '657674c53b994a668f3d11b188648242', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee599d8c7858456caf4df2fd39189e22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.501504] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lease: (returnval){ [ 650.501504] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2454-112b-346c-299e-5e4265209d7c" [ 650.501504] env[68638]: _type = "HttpNfcLease" [ 650.501504] env[68638]: } obtained for exporting VM: (result){ [ 650.501504] env[68638]: value = "vm-569805" [ 650.501504] env[68638]: _type = "VirtualMachine" [ 650.501504] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 650.502317] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the lease: (returnval){ [ 650.502317] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2454-112b-346c-299e-5e4265209d7c" [ 650.502317] env[68638]: _type = "HttpNfcLease" [ 650.502317] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 650.509262] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 650.509262] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2454-112b-346c-299e-5e4265209d7c" [ 650.509262] env[68638]: _type = "HttpNfcLease" [ 650.509262] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 650.509613] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 650.509613] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2454-112b-346c-299e-5e4265209d7c" [ 650.509613] env[68638]: _type = "HttpNfcLease" [ 650.509613] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 650.510599] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0a13f3-61c2-44ed-a283-3079f6dd2725 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.521918] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 650.522183] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 650.620731] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63184ff1-e886-4df9-b5d4-5c45857f6629 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.768694] env[68638]: DEBUG nova.network.neutron [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.824997] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833189, 'name': CreateVM_Task, 'duration_secs': 0.317075} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.827098] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.827910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.828094] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.828501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.828725] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f8beb79-89fe-4e6b-af8f-9450bcb8ed45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.842359] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 650.842359] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e052af-b5e0-6fd1-7d12-ecc6f6a5bacf" [ 650.842359] env[68638]: _type = "Task" [ 650.842359] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.855194] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e052af-b5e0-6fd1-7d12-ecc6f6a5bacf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.934215] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 650.967276] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.989562] env[68638]: DEBUG nova.network.neutron [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.181681] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c1810d-a2cb-46b1-b59f-9f6a8f703f04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.190394] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-526d59d3-6460-4e6f-9ea1-341276659f38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.236747] env[68638]: DEBUG nova.network.neutron [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Updated VIF entry in instance network info cache for port 1a99ea84-dd8a-40bf-bfb2-405fdd3a9919. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 651.237150] env[68638]: DEBUG nova.network.neutron [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Updating instance_info_cache with network_info: [{"id": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "address": "fa:16:3e:56:1c:f4", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1a99ea84-dd", "ovs_interfaceid": "1a99ea84-dd8a-40bf-bfb2-405fdd3a9919", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.239303] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3769ee-5b59-4338-85ae-22d9d6647106 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.248779] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad9fd5f-9f24-408e-b590-53be2c6ecdc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.264932] env[68638]: DEBUG nova.compute.provider_tree [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 
tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.356468] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e052af-b5e0-6fd1-7d12-ecc6f6a5bacf, 'name': SearchDatastore_Task, 'duration_secs': 0.01729} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.356468] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.357015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.360519] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.360519] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.361095] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.361790] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Successfully created port: e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.365242] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0960f0dc-2dec-4dc7-a846-112761d1c1d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.375118] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.375118] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.375118] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15e01b67-2da4-4a0b-9c0b-3ea0937f8ed8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.382080] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 651.382080] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5286cca2-c89f-daae-e86e-c5646dab0f40" [ 651.382080] env[68638]: _type = "Task" [ 651.382080] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.390281] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5286cca2-c89f-daae-e86e-c5646dab0f40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.488890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.493131] env[68638]: DEBUG oslo_concurrency.lockutils [req-df1a0630-deef-4034-849b-68b8a7f3a39c req-5a179de9-4b15-428e-931f-016e2283cf81 service nova] Releasing lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.493131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquired lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.493131] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.744483] env[68638]: DEBUG oslo_concurrency.lockutils [req-52178e51-b7fc-4473-b203-7b28803ec854 req-917b68bf-9fa3-4e1a-acc0-a3a61f704b52 service nova] Releasing lock "refresh_cache-f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.772237] env[68638]: DEBUG nova.scheduler.client.report [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.899021] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5286cca2-c89f-daae-e86e-c5646dab0f40, 'name': SearchDatastore_Task, 'duration_secs': 0.013431} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.899021] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68a649d3-8efa-43d8-873f-a3a61bbc728f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.905743] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 651.905743] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a82806-be13-183d-e28c-c37f91566130" [ 651.905743] env[68638]: _type = "Task" [ 651.905743] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.915943] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a82806-be13-183d-e28c-c37f91566130, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.945989] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 651.975252] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 651.976276] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.976732] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 651.977064] env[68638]: DEBUG nova.virt.hardware [None 
req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.977442] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 651.977730] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 651.978203] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 651.979057] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 651.979515] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 651.979940] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 651.980297] env[68638]: DEBUG nova.virt.hardware [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 651.981629] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9edb004-1404-423f-813a-7c647e914bb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.992811] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c59eb8-20d8-4839-8abd-dcd6df6b448a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.035807] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 
1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.229731] env[68638]: DEBUG nova.network.neutron [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Updating instance_info_cache with network_info: [{"id": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "address": "fa:16:3e:7a:31:9e", "network": {"id": "57ebfe6e-057a-4602-a7df-a5e0aeb0da16", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1876804051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66afa9ca42294c8e9e8d913b14e4a209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape76019da-d5", "ovs_interfaceid": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.278042] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.359s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.281443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.485s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.281998] env[68638]: DEBUG nova.objects.instance [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lazy-loading 'resources' on Instance uuid 8f841b29-0156-414e-8467-c9a9393cdae9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.310734] env[68638]: INFO nova.scheduler.client.report [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted allocations for instance f767af17-f2bb-461d-9e7f-9c62b5504257 [ 652.418654] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a82806-be13-183d-e28c-c37f91566130, 'name': SearchDatastore_Task, 
'duration_secs': 0.010454} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.418924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.419217] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8/f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.419762] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60c95076-4f6f-4dd8-9bb5-e746ad73d8b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.427271] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 652.427271] env[68638]: value = "task-2833191" [ 652.427271] env[68638]: _type = "Task" [ 652.427271] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.441537] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833191, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.735665] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Releasing lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.736587] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance network_info: |[{"id": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "address": "fa:16:3e:7a:31:9e", "network": {"id": "57ebfe6e-057a-4602-a7df-a5e0aeb0da16", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1876804051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66afa9ca42294c8e9e8d913b14e4a209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape76019da-d5", "ovs_interfaceid": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.737486] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.738163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.739337] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:31:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e76019da-d59a-45b4-a8e3-6fcded54f7b8', 'vif_model': 
'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.753917] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Creating folder: Project (66afa9ca42294c8e9e8d913b14e4a209). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.755686] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fc9e82a-cc72-4e62-ac75-a269e6390ee4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.759746] env[68638]: DEBUG nova.compute.manager [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 652.760069] env[68638]: DEBUG nova.compute.manager [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing instance network info cache due to event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 652.760453] env[68638]: DEBUG oslo_concurrency.lockutils [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.760710] env[68638]: DEBUG oslo_concurrency.lockutils [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.761213] env[68638]: DEBUG nova.network.neutron [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.774027] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Created folder: Project (66afa9ca42294c8e9e8d913b14e4a209) in parent group-v569734. [ 652.774541] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Creating folder: Instances. Parent ref: group-v569807. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.775180] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81e2c0ef-8fe7-4276-ad56-a3b517b6c5a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.790244] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Created folder: Instances in parent group-v569807. [ 652.790605] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.792163] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.792910] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9969d55-a9c4-4207-8724-0b0c76efd9a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.820861] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.820861] env[68638]: value = "task-2833194" [ 652.820861] env[68638]: _type = "Task" [ 652.820861] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.821511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1bb79f8d-f349-4969-bbdb-1f559e767b43 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "f767af17-f2bb-461d-9e7f-9c62b5504257" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.299s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.845756] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833194, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.944053] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833191, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.339689] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833194, 'name': CreateVM_Task, 'duration_secs': 0.49857} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.339689] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.340775] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.340775] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.340874] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 653.342035] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c7fa582-d142-42a0-8bb2-304bed13666a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.346549] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 653.346549] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523b1fee-edf8-ce12-8155-6de66d0db696" [ 653.346549] env[68638]: _type = "Task" [ 653.346549] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.357551] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523b1fee-edf8-ce12-8155-6de66d0db696, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.443790] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833191, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571349} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.444215] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8/f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.444840] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.445169] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7dc334c0-495f-4bcc-9e54-b2d134b0f7f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.462155] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 653.462155] env[68638]: value = "task-2833195" [ 653.462155] env[68638]: _type = "Task" [ 653.462155] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.472369] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833195, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.493493] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b812c6-2e38-468b-8328-197bfdbb1510 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.501342] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f2ebe3-2c2c-44a6-b119-faa9b7ea3976 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.539149] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88052f11-12a1-49b7-bcb6-c91954ed0e73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.547812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8953036-54ce-4764-ac23-280be272976b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.562645] env[68638]: DEBUG nova.compute.provider_tree [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.594406] env[68638]: DEBUG nova.network.neutron [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updated VIF entry in instance network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.595748] env[68638]: DEBUG nova.network.neutron [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.811466] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Successfully updated port: e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.857789] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523b1fee-edf8-ce12-8155-6de66d0db696, 'name': SearchDatastore_Task, 'duration_secs': 0.012834} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.858116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.858354] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.858588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.858718] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.858891] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.859206] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05c11c23-6e5e-4432-b427-cac326213183 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.874512] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.874929] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.875740] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ffd21f1-b931-47a1-a05c-0e637723291b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.882997] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 653.882997] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ccfe04-aeeb-1141-f023-4ba8c8187668" [ 653.882997] env[68638]: _type = "Task" [ 653.882997] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.893130] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ccfe04-aeeb-1141-f023-4ba8c8187668, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.970231] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065375} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.970584] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.971414] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350d4a93-3b1e-4772-a5c2-57033fe2dd62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.997254] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8/f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.997627] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55b41601-2543-4a35-90f0-a97955108d70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.018749] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 654.018749] env[68638]: value = "task-2833196" [ 654.018749] env[68638]: _type = "Task" [ 654.018749] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.028890] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833196, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.066120] env[68638]: DEBUG nova.scheduler.client.report [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.098791] env[68638]: DEBUG oslo_concurrency.lockutils [req-3d990f4f-f3bd-44d8-abc6-ef8f5f50add4 req-b08ce0d5-4bd2-4419-bdad-f33e0675765c service nova] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.315095] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.315265] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquired lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.315419] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.394194] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ccfe04-aeeb-1141-f023-4ba8c8187668, 'name': SearchDatastore_Task, 'duration_secs': 0.023901} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.395129] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2ff2b1-54ce-44ea-b93b-b57d9d321745 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.401181] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 654.401181] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d0449b-9747-2337-8933-8ced771cb2c7" [ 654.401181] env[68638]: _type = "Task" [ 654.401181] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.410185] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d0449b-9747-2337-8933-8ced771cb2c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.420295] env[68638]: DEBUG nova.compute.manager [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Received event network-vif-plugged-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 654.420295] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] Acquiring lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.420295] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.420295] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.420295] env[68638]: DEBUG nova.compute.manager [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] No waiting events found dispatching network-vif-plugged-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 654.420432] env[68638]: WARNING nova.compute.manager [req-a6166049-e126-42ad-b8bb-47b2d396c130 req-ecd886f8-0cde-45d9-bc74-a43cab355ca6 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Received unexpected event 
network-vif-plugged-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 for instance with vm_state building and task_state spawning. [ 654.534644] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.572368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.291s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.577710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.353s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.578434] env[68638]: DEBUG nova.objects.instance [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lazy-loading 'resources' on Instance uuid 6cb1846a-02aa-4dc3-a573-858abf5a0bdf {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 654.614717] env[68638]: INFO nova.scheduler.client.report [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted allocations for instance 8f841b29-0156-414e-8467-c9a9393cdae9 [ 654.872407] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 654.921522] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d0449b-9747-2337-8933-8ced771cb2c7, 'name': SearchDatastore_Task, 'duration_secs': 0.023083} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.921987] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.922501] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1eee31b7-db8b-4765-8cc2-4273717ef86e/1eee31b7-db8b-4765-8cc2-4273717ef86e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.923154] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29b37f2b-3b7d-4393-8a6f-3c446babad4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.932482] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 654.932482] env[68638]: value = "task-2833197" [ 654.932482] env[68638]: _type = "Task" [ 654.932482] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.941188] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.033934] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833196, 'name': ReconfigVM_Task, 'duration_secs': 0.72635} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.033934] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Reconfigured VM instance instance-00000018 to attach disk [datastore1] f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8/f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.033934] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3463714-714a-44db-8c66-6b0ab1f8f994 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.040023] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 655.040023] env[68638]: value = "task-2833198" [ 655.040023] env[68638]: _type = "Task" [ 655.040023] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.047527] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833198, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.096943] env[68638]: DEBUG nova.network.neutron [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updating instance_info_cache with network_info: [{"id": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "address": "fa:16:3e:fc:58:d5", "network": {"id": "40924a16-ab11-4519-88a4-fd9bfad0e1dc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-299377069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee599d8c7858456caf4df2fd39189e22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2ae854b-e4", "ovs_interfaceid": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.128222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85b3c085-14cc-45bd-bf1a-1bfc531f0de4 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock 
"8f841b29-0156-414e-8467-c9a9393cdae9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.861s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.442097] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833197, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.554184] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833198, 'name': Rename_Task, 'duration_secs': 0.229608} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.554539] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.554801] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6fb14d3-d988-4542-90af-285c7d86fdf8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.564060] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 655.564060] env[68638]: value = "task-2833199" [ 655.564060] env[68638]: _type = "Task" [ 655.564060] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.576062] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833199, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.602346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Releasing lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.602968] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Instance network_info: |[{"id": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "address": "fa:16:3e:fc:58:d5", "network": {"id": "40924a16-ab11-4519-88a4-fd9bfad0e1dc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-299377069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee599d8c7858456caf4df2fd39189e22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2ae854b-e4", "ovs_interfaceid": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 655.603204] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:58:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24210a23-d8ac-4f4f-84ac-dc0636de9a72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2ae854b-e47d-4298-8a7e-1a2f6c3e3206', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 655.611290] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Creating folder: Project (ee599d8c7858456caf4df2fd39189e22). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.617190] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91f04d46-45a6-43e8-83ee-49cb3b2ef029 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.625512] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Created folder: Project (ee599d8c7858456caf4df2fd39189e22) in parent group-v569734. [ 655.625512] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Creating folder: Instances. Parent ref: group-v569810. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 655.625512] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99557f2e-2046-4ee0-9695-017b8587404d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.638116] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Created folder: Instances in parent group-v569810. [ 655.638116] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 655.638250] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 655.638463] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b29c6938-4ff7-4cc0-8e8f-7457d76b162b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.672215] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 655.672215] env[68638]: value = "task-2833202" [ 655.672215] env[68638]: _type = "Task" [ 655.672215] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.682638] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833202, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.753938] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b54531b-666f-493e-ae99-2fb8676258d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.764328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb9174f-4d45-4916-b7b1-7a06df1e64a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.804684] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d0ad3a-efc2-4eaf-9b33-96a000ff27d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.818720] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b40abf-1ed7-40ec-b1c9-f143e159412a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.837339] env[68638]: DEBUG nova.compute.provider_tree [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.943254] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5908} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.943581] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1eee31b7-db8b-4765-8cc2-4273717ef86e/1eee31b7-db8b-4765-8cc2-4273717ef86e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.943867] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.944221] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed28ae7b-4738-424b-971f-886cbcdde3c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.951520] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 655.951520] env[68638]: value = "task-2833203" [ 655.951520] env[68638]: _type = "Task" [ 655.951520] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.960653] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.075115] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833199, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.182122] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833202, 'name': CreateVM_Task, 'duration_secs': 0.491487} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.182493] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.183203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.183565] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.183957] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.184225] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a221aa50-0378-440b-bc59-cf91a2bb33c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.189478] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 656.189478] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52156dab-c49b-d21d-d073-27143d4e3dbe" [ 656.189478] env[68638]: _type = "Task" [ 656.189478] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.197514] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52156dab-c49b-d21d-d073-27143d4e3dbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.337012] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.340146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.341629] env[68638]: DEBUG nova.scheduler.client.report [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.379241] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.379506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.461476] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093875} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.461903] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.462930] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5675cf5-7b5e-4879-935c-597affecc228 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.489490] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 1eee31b7-db8b-4765-8cc2-4273717ef86e/1eee31b7-db8b-4765-8cc2-4273717ef86e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.489888] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66af040f-9ee5-42fc-82b2-fc4921cb2e4e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.511303] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 656.511303] env[68638]: value = "task-2833207" [ 656.511303] env[68638]: _type = "Task" [ 656.511303] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.518947] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833207, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.574867] env[68638]: DEBUG oslo_vmware.api [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833199, 'name': PowerOnVM_Task, 'duration_secs': 0.589702} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.575225] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.575440] env[68638]: INFO nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Took 10.54 seconds to spawn the instance on the hypervisor. 
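The spawn sequence above repeats one pattern for every vSphere operation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): the request is submitted through the oslo.vmware session, the SOAP call returns a Task managed object, and wait_for_task() polls it until completion, which is what produces the "Waiting for the task ... / progress is N% / completed successfully" triplets. The sketch below is a minimal illustration of that pattern, not the driver's actual code path: the connection parameters, datastore paths, and keyword arguments are placeholders or assumptions; only VMwareAPISession, session.wait_for_task(), and the CopyVirtualDisk_Task invocation through the virtualDiskManager are taken from the log itself.

    # Minimal sketch of the submit-then-poll pattern visible in the log above.
    # Host, credentials, and datastore paths are placeholders; exact constructor
    # arguments are assumptions for illustration.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc1.example.test',            # vCenter host (placeholder)
        'user', 'secret',              # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)        # roughly the poll cadence seen in the log

    # Submit an asynchronous disk copy; the call returns a Task managed object.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task() polls the task (the "progress is N%" DEBUG lines) and
    # raises if the task ends in error; on success it returns the task info.
    task_info = session.wait_for_task(task)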
[ 656.575619] env[68638]: DEBUG nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.576523] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b924d281-1cb7-4d3e-a592-985ffa7f005f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.700235] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52156dab-c49b-d21d-d073-27143d4e3dbe, 'name': SearchDatastore_Task, 'duration_secs': 0.02591} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.701080] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.701924] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.702278] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.702535] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.702840] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.703132] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a885aa17-9e85-4070-97fa-9934ccac218c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.711771] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 
tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.712062] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.712834] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34af5cd6-7c58-4053-a4ec-b677d29653ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.718335] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 656.718335] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52549cca-7539-970d-4f93-39001d2089da" [ 656.718335] env[68638]: _type = "Task" [ 656.718335] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.726071] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52549cca-7539-970d-4f93-39001d2089da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.847849] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.850405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.102s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.850567] env[68638]: DEBUG nova.objects.instance [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 656.881074] env[68638]: INFO nova.scheduler.client.report [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted allocations for instance 6cb1846a-02aa-4dc3-a573-858abf5a0bdf [ 656.964444] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 
req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Received event network-changed-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 656.964709] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Refreshing instance network info cache due to event network-changed-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 656.965165] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquiring lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.965165] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquired lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.965165] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Refreshing network info cache for port e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 657.022498] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833207, 'name': ReconfigVM_Task, 'duration_secs': 0.422099} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.024041] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 1eee31b7-db8b-4765-8cc2-4273717ef86e/1eee31b7-db8b-4765-8cc2-4273717ef86e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.024811] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e89be98f-7f71-4e09-9f39-261f356522b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.031909] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 657.031909] env[68638]: value = "task-2833209" [ 657.031909] env[68638]: _type = "Task" [ 657.031909] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.040595] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833209, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.099637] env[68638]: INFO nova.compute.manager [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Took 37.47 seconds to build instance. [ 657.230575] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52549cca-7539-970d-4f93-39001d2089da, 'name': SearchDatastore_Task, 'duration_secs': 0.010507} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.231437] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-accf7afd-64bc-451d-baed-8597350de02c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.236780] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 657.236780] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5229a6d8-54c4-a79b-b224-e15e6352938d" [ 657.236780] env[68638]: _type = "Task" [ 657.236780] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.244692] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5229a6d8-54c4-a79b-b224-e15e6352938d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.391246] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b35c78d6-14b5-496e-ae40-c37d3cf3f2e3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "6cb1846a-02aa-4dc3-a573-858abf5a0bdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.735s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.511919] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.512222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.512448] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.512617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.512780] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.514862] env[68638]: INFO nova.compute.manager [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Terminating instance [ 657.544142] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833209, 'name': Rename_Task, 'duration_secs': 0.198003} completed 
successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.544569] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.544934] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b852cef2-0cec-460f-a34e-f639db81a1ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.552946] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 657.552946] env[68638]: value = "task-2833210" [ 657.552946] env[68638]: _type = "Task" [ 657.552946] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.565389] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.604506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4ba4d797-82f9-4175-91bb-b2fb8bf6e7f4 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.639s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.751884] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5229a6d8-54c4-a79b-b224-e15e6352938d, 'name': SearchDatastore_Task, 'duration_secs': 0.008516} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.752805] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.753176] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ac0141c2-aef6-4edf-913a-d4a41b502c10/ac0141c2-aef6-4edf-913a-d4a41b502c10.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.753479] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e44dd0ff-7eea-4bba-b7ff-1dcc12501db8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.760236] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 657.760236] env[68638]: value = "task-2833211" [ 657.760236] env[68638]: _type = "Task" [ 657.760236] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.767877] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833211, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.792973] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updated VIF entry in instance network info cache for port e2ae854b-e47d-4298-8a7e-1a2f6c3e3206. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.793407] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updating instance_info_cache with network_info: [{"id": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "address": "fa:16:3e:fc:58:d5", "network": {"id": "40924a16-ab11-4519-88a4-fd9bfad0e1dc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-299377069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee599d8c7858456caf4df2fd39189e22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2ae854b-e4", "ovs_interfaceid": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.859567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74383b20-ae4d-42b8-bc64-20281130e6b9 tempest-ServersAdmin275Test-786929497 tempest-ServersAdmin275Test-786929497-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.861248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.384s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.865694] env[68638]: INFO nova.compute.claims [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.021294] env[68638]: DEBUG nova.compute.manager [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 658.021294] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 658.021294] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317572a3-6329-456a-8baf-499c9d531fe2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.028973] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 658.028973] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a0ac0e7-ab2a-4b47-9737-727d72b2d594 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.037048] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 658.037048] env[68638]: value = "task-2833212" [ 658.037048] env[68638]: _type = "Task" [ 658.037048] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.045119] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.068196] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833210, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.112608] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.273207] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833211, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.296739] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Releasing lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.297036] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 658.298310] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing instance network info cache due to event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 658.298310] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.298310] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.298310] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.548028] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833212, 'name': PowerOffVM_Task, 'duration_secs': 0.392547} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.548502] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 658.548646] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 658.550033] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be0153fc-f878-48bf-bd1f-0ebc62ddf236 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.565068] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833210, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.617870] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 658.617870] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 658.618098] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleting the datastore file [datastore2] 168c2937-f8ce-472f-b21f-e48eed909f43 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 658.620691] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67be1522-e1da-404b-8488-322f15722228 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.632135] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 658.632135] env[68638]: value = "task-2833214" [ 658.632135] env[68638]: _type = "Task" [ 658.632135] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.646372] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833214, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.648124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.774081] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833211, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.945725} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.774520] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ac0141c2-aef6-4edf-913a-d4a41b502c10/ac0141c2-aef6-4edf-913a-d4a41b502c10.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 658.774846] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 658.775146] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a92bafad-b1f3-4401-954f-39b054e07e1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.787062] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 658.787062] env[68638]: value = "task-2833215" [ 658.787062] env[68638]: _type = "Task" [ 658.787062] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.806181] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.067667] env[68638]: DEBUG oslo_vmware.api [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833210, 'name': PowerOnVM_Task, 'duration_secs': 1.452172} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.070349] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.070564] env[68638]: INFO nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Took 9.66 seconds to spawn the instance on the hypervisor. [ 659.070743] env[68638]: DEBUG nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.077251] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f695c6e-c0ae-4a69-a82e-76db39df505d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.135664] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updated VIF entry in instance network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.136037] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.147972] env[68638]: DEBUG oslo_vmware.api [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276926} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.152811] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 659.153049] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 659.153251] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 659.153458] env[68638]: INFO nova.compute.manager [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 659.153711] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.154795] env[68638]: DEBUG nova.compute.manager [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 659.154885] env[68638]: DEBUG nova.network.neutron [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 659.312164] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091096} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.312500] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 659.313641] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33c4467-51cb-4bf6-9d90-520f1c8138b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.357698] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] ac0141c2-aef6-4edf-913a-d4a41b502c10/ac0141c2-aef6-4edf-913a-d4a41b502c10.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 659.361213] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66512b7d-e572-427c-a159-398e41184d05 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.382098] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 659.382098] env[68638]: value = "task-2833216" [ 659.382098] env[68638]: _type = "Task" [ 659.382098] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.398642] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833216, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.464884] env[68638]: DEBUG nova.compute.manager [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 659.465175] env[68638]: DEBUG nova.compute.manager [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing instance network info cache due to event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 659.465431] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.541642] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfedb47-adf5-4196-b255-c5fe7e033e70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.553044] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8186a5d8-fdbb-46e5-bee7-45d0f3b000b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.598724] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b79ec3-0f18-49e2-81d1-79ad44364598 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.607449] env[68638]: INFO nova.compute.manager [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Took 38.41 seconds to build instance. 
[ 659.617153] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e2c654-8280-48eb-9a66-e0c66d1acd3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.635563] env[68638]: DEBUG nova.compute.provider_tree [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.642149] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.642374] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 659.642570] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing instance network info cache due to event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 659.642794] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquiring lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.642988] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquired lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.643213] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing network info cache for port 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.644718] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.644920] env[68638]: DEBUG nova.network.neutron [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.892825] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 
tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833216, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.935299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "2450602a-fde7-4a65-b7a2-be4195077758" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.935522] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.063551] env[68638]: DEBUG nova.network.neutron [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.111815] env[68638]: DEBUG oslo_concurrency.lockutils [None req-22453fa9-f6cf-43cd-8417-85919e47246f tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.140s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.138959] env[68638]: DEBUG nova.scheduler.client.report [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.172895] env[68638]: INFO nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Port 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 660.173205] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.392216] env[68638]: DEBUG nova.network.neutron [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updated VIF entry in instance network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.392569] env[68638]: DEBUG nova.network.neutron [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.402423] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833216, 'name': ReconfigVM_Task, 'duration_secs': 0.765041} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.402763] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Reconfigured VM instance instance-0000001a to attach disk [datastore1] ac0141c2-aef6-4edf-913a-d4a41b502c10/ac0141c2-aef6-4edf-913a-d4a41b502c10.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.403587] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39ba8294-ea33-406d-a06c-6e04f6ef7bbe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.410466] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 660.410466] env[68638]: value = "task-2833218" [ 660.410466] env[68638]: _type = "Task" [ 660.410466] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.422134] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833218, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.509081] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 660.509081] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4359f2f-d08f-4bbb-b931-421ba07b95e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.514828] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 660.515403] env[68638]: ERROR oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk due to incomplete transfer. 
[ 660.515781] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e123347c-0be0-49d5-bfd2-3117d1dd69a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.527024] env[68638]: DEBUG oslo_vmware.rw_handles [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c771b2-ea95-e03a-9836-1df2ea7bcdda/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 660.529024] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Uploaded image 63fdcb23-2a1a-4cf7-a2aa-69fc8d769281 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 660.530890] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 660.531194] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6a6b291f-2124-4a68-8bde-28bfa1d849d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.538036] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 660.538036] env[68638]: value = "task-2833219" [ 660.538036] env[68638]: _type = "Task" [ 660.538036] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.547408] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833219, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.566158] env[68638]: INFO nova.compute.manager [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 1.41 seconds to deallocate network for instance. [ 660.615426] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.645181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.784s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.645774] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.648732] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.908s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.650661] env[68638]: INFO nova.compute.claims [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.676194] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Releasing lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.676194] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 660.676388] env[68638]: DEBUG nova.compute.manager [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing instance network info cache due to event network-changed-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 660.677233] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquiring lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.677233] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Acquired lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.677233] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Refreshing network info cache for port 6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.900141] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fe46793-f110-40e4-81f2-b825fac93825 req-005dda02-8422-4ece-bc0c-76853dcd3694 service nova] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.924867] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833218, 'name': Rename_Task, 'duration_secs': 0.339085} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.925351] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 660.925688] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6415aeb-76dc-4a06-aa07-949aa4fdaa12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.933193] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 660.933193] env[68638]: value = "task-2833220" [ 660.933193] env[68638]: _type = "Task" [ 660.933193] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.941436] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833220, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.049844] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833219, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.075134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.141913] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.158288] env[68638]: DEBUG nova.compute.utils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 661.159525] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 661.159930] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.217213] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.231311] env[68638]: DEBUG nova.policy [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c89e1210c3fb46d6b617655fac7c6d53', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f54818a99fac4274befb43a064c49c31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 661.252618] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.370259] env[68638]: DEBUG nova.network.neutron [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.445494] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833220, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.549352] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833219, 'name': Destroy_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.582860] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Successfully created port: c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.615209] env[68638]: DEBUG nova.compute.manager [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Received event network-vif-deleted-6a0a18ba-0a6c-47d5-9f8a-f4dced2c324f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 661.615482] env[68638]: DEBUG nova.compute.manager [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 661.615699] env[68638]: DEBUG nova.compute.manager [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing instance network info cache due to event network-changed-63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 661.615911] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Acquiring lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.616134] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Acquired lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.616438] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Refreshing network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.661149] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.875059] env[68638]: DEBUG oslo_concurrency.lockutils [req-9ee2234b-97f6-4568-b51c-2a6998597931 req-0b9ac3a7-4515-4815-b78b-bb38689198c1 service nova] Releasing lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.952018] env[68638]: DEBUG oslo_vmware.api [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833220, 'name': PowerOnVM_Task, 'duration_secs': 0.869734} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.952018] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 661.952018] env[68638]: INFO nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Took 10.00 seconds to spawn the instance on the hypervisor. [ 661.952018] env[68638]: DEBUG nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.952018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b2ea39-19b4-400e-8d24-8940c2f3ccbd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.053655] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833219, 'name': Destroy_Task, 'duration_secs': 1.03323} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.053921] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Destroyed the VM [ 662.054172] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 662.054424] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-185a7f20-5def-4223-8452-98327334d611 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.061957] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 662.061957] env[68638]: value = "task-2833222" [ 662.061957] env[68638]: _type = "Task" [ 662.061957] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.073996] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833222, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.266922] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8dc0e2-cca1-468b-9873-699d253062e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.276051] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4957c727-7d7e-4e1c-a529-708396f27fa9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.307942] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c222941b-ea0b-406d-865e-eeea7246d2ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.315158] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7391377-23dd-4635-9a53-bb24b087e380 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.328908] env[68638]: DEBUG nova.compute.provider_tree [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.402748] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updated VIF entry in 
instance network info cache for port 63f69876-6edd-4869-b1f4-40bf4dd16383. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 662.403149] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [{"id": "63f69876-6edd-4869-b1f4-40bf4dd16383", "address": "fa:16:3e:f8:e5:04", "network": {"id": "4ee2b8ce-01eb-4d0d-8592-64c3bbb04c98", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2125764351-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ffb656ebf844d4b71f49b35a594d4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "86a35d07-53d3-46b3-92cb-ae34236c0f41", "external-id": "nsx-vlan-transportzone-811", "segmentation_id": 811, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63f69876-6e", "ovs_interfaceid": "63f69876-6edd-4869-b1f4-40bf4dd16383", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.473025] env[68638]: INFO nova.compute.manager [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Took 37.86 seconds to build instance. [ 662.572597] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833222, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.632419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "1946baab-bb48-4138-8db6-1f530e432c3d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.633326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.633771] env[68638]: DEBUG nova.compute.manager [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.634718] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3bac98-9cae-4235-a0b2-ba44f2cc9039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.642672] env[68638]: DEBUG nova.compute.manager [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 662.643310] env[68638]: DEBUG nova.objects.instance [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'flavor' on Instance uuid 1946baab-bb48-4138-8db6-1f530e432c3d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 662.675759] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.700933] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.701222] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.701357] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.701550] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.701714] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.701911] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.702163] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.702330] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 662.702500] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.702663] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.702836] env[68638]: DEBUG nova.virt.hardware [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.704094] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d1d1a6-2a83-4302-91c1-b6c38e8170fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.714142] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e173e2-5e95-4292-879c-355f8922228e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.832651] env[68638]: DEBUG nova.scheduler.client.report [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.908607] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Releasing lock "refresh_cache-e3cf739a-3104-473d-af66-d9974ed1a222" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.908890] env[68638]: DEBUG nova.compute.manager [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Received event network-changed-e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 662.909077] env[68638]: DEBUG nova.compute.manager [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Refreshing instance network info cache due to event network-changed-e76019da-d59a-45b4-a8e3-6fcded54f7b8. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 662.909293] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Acquiring lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.909435] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Acquired lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.909594] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Refreshing network info cache for port e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.974397] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3959067a-96b1-48be-93bc-bc42ade84e54 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.551s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.073601] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833222, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.340888] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.341490] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.345603] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.553s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.345964] env[68638]: DEBUG nova.objects.instance [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lazy-loading 'resources' on Instance uuid c71693e9-aeaa-4f12-b5cf-a179e558505d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 663.397130] env[68638]: DEBUG nova.compute.manager [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Received event network-changed-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 663.397332] env[68638]: DEBUG nova.compute.manager [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Refreshing instance network info cache due to event network-changed-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 663.397555] env[68638]: DEBUG oslo_concurrency.lockutils [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] Acquiring lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.397698] env[68638]: DEBUG oslo_concurrency.lockutils [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] Acquired lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.397855] env[68638]: DEBUG nova.network.neutron [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Refreshing network info cache for port e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.476328] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.552864] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Successfully updated port: c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.574587] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833222, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.653900] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 663.654283] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8dbeae34-6b19-4d46-be9a-3d00a4b262ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.664440] env[68638]: DEBUG oslo_vmware.api [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 663.664440] env[68638]: value = "task-2833224" [ 663.664440] env[68638]: _type = "Task" [ 663.664440] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.673945] env[68638]: DEBUG oslo_vmware.api [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.849292] env[68638]: DEBUG nova.compute.utils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.849626] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.849956] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.908873] env[68638]: DEBUG nova.policy [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e66d8cfbe6c41bc90baaf1e7eb23a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ded98d5a15c54e01b752c52b88549b3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.999678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.007946] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Updated VIF entry in instance network info cache for port e76019da-d59a-45b4-a8e3-6fcded54f7b8. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.008336] env[68638]: DEBUG nova.network.neutron [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Updating instance_info_cache with network_info: [{"id": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "address": "fa:16:3e:7a:31:9e", "network": {"id": "57ebfe6e-057a-4602-a7df-a5e0aeb0da16", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1876804051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66afa9ca42294c8e9e8d913b14e4a209", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape76019da-d5", "ovs_interfaceid": "e76019da-d59a-45b4-a8e3-6fcded54f7b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.054955] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.055136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.055286] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.076073] env[68638]: DEBUG oslo_vmware.api [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833222, 'name': RemoveSnapshot_Task, 'duration_secs': 1.519162} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.079058] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 664.080190] env[68638]: INFO nova.compute.manager [None req-83f6cd31-4a26-4c73-b857-5de259e619f0 tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 17.18 seconds to snapshot the instance on the hypervisor. [ 664.177083] env[68638]: DEBUG oslo_vmware.api [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833224, 'name': PowerOffVM_Task, 'duration_secs': 0.272299} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.177382] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 664.177671] env[68638]: DEBUG nova.compute.manager [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.178746] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbcc324-930a-4921-afdb-0ae67ce71833 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.206151] env[68638]: DEBUG nova.network.neutron [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updated VIF entry in instance network info cache for port e2ae854b-e47d-4298-8a7e-1a2f6c3e3206. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.206495] env[68638]: DEBUG nova.network.neutron [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updating instance_info_cache with network_info: [{"id": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "address": "fa:16:3e:fc:58:d5", "network": {"id": "40924a16-ab11-4519-88a4-fd9bfad0e1dc", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-299377069-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee599d8c7858456caf4df2fd39189e22", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24210a23-d8ac-4f4f-84ac-dc0636de9a72", "external-id": "nsx-vlan-transportzone-257", "segmentation_id": 257, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2ae854b-e4", "ovs_interfaceid": "e2ae854b-e47d-4298-8a7e-1a2f6c3e3206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.303772] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Successfully created port: a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.360196] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.480780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279f90f8-b957-452e-a013-1e84d14023dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.489691] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5401fb54-038d-4e49-a439-0c7e7a7c0771 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.528429] env[68638]: DEBUG oslo_concurrency.lockutils [req-f85d0785-bca7-4d70-9bc3-6b5369a42454 req-9b9d14d3-9aa0-4d59-a0b4-79d55311a228 service nova] Releasing lock "refresh_cache-1eee31b7-db8b-4765-8cc2-4273717ef86e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.529758] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1888b4fd-f4da-4640-8e49-95440d3407eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.537702] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa8496f-c094-4c94-b643-640ea124e953 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.553076] env[68638]: DEBUG nova.compute.provider_tree [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.587235] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.692726] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8b70953d-8f99-4f26-9218-bd5a121b5370 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.708623] env[68638]: DEBUG oslo_concurrency.lockutils [req-e1c0bcce-74cf-454b-a847-c497ff6433a9 req-3009372f-19d5-4d37-b714-0707e3ab86b5 service nova] Releasing lock "refresh_cache-ac0141c2-aef6-4edf-913a-d4a41b502c10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.764311] env[68638]: DEBUG nova.network.neutron [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Updating instance_info_cache with network_info: [{"id": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "address": "fa:16:3e:77:da:5e", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc66c96e0-bc", "ovs_interfaceid": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.057315] env[68638]: DEBUG nova.scheduler.client.report [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.267292] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
665.267608] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Instance network_info: |[{"id": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "address": "fa:16:3e:77:da:5e", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc66c96e0-bc", "ovs_interfaceid": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 665.268058] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:da:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c66c96e0-bc74-4336-b48c-3005a0e8be96', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.276650] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 665.277325] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 665.277572] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-152e2154-3cc1-4fb1-8e4d-a64a3035a9f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.299150] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.299150] env[68638]: value = "task-2833225" [ 665.299150] env[68638]: _type = "Task" [ 665.299150] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.308441] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833225, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.370254] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.394481] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.394753] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.394912] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.395109] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.395259] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.395408] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.395623] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.395820] 
env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.396507] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.396507] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.396507] env[68638]: DEBUG nova.virt.hardware [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.397292] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471bd966-b85f-437b-87fa-db2e2df3837e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.405809] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc60c8cd-2f0e-41a8-891c-b9941df2db5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.535431] env[68638]: DEBUG nova.compute.manager [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Received event network-vif-plugged-c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 665.535753] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Acquiring lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.536095] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.536533] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.536706] env[68638]: DEBUG nova.compute.manager [req-92549503-8a78-4635-98b1-a4c577215816 
req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] No waiting events found dispatching network-vif-plugged-c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.536949] env[68638]: WARNING nova.compute.manager [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Received unexpected event network-vif-plugged-c66c96e0-bc74-4336-b48c-3005a0e8be96 for instance with vm_state building and task_state spawning. [ 665.537151] env[68638]: DEBUG nova.compute.manager [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Received event network-changed-c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 665.537310] env[68638]: DEBUG nova.compute.manager [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Refreshing instance network info cache due to event network-changed-c66c96e0-bc74-4336-b48c-3005a0e8be96. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 665.537497] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Acquiring lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.537631] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Acquired lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.537783] env[68638]: DEBUG nova.network.neutron [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Refreshing network info cache for port c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.557220] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "a5dedd3e-a544-4005-bc9b-0735267d6753" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.557220] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.557220] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock 
"a5dedd3e-a544-4005-bc9b-0735267d6753-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.557450] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.557482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.560313] env[68638]: INFO nova.compute.manager [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Terminating instance [ 665.562556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.217s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.566672] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 31.253s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.593915] env[68638]: INFO nova.scheduler.client.report [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Deleted allocations for instance c71693e9-aeaa-4f12-b5cf-a179e558505d [ 665.707712] env[68638]: DEBUG nova.objects.instance [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'flavor' on Instance uuid 1946baab-bb48-4138-8db6-1f530e432c3d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 665.808888] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833225, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.892176] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Successfully updated port: a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.075320] env[68638]: DEBUG nova.compute.manager [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 666.075320] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 666.079460] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c94d5c2-65db-4cd1-9bb6-c85afe4e97fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.104649] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6a69ded4-6d55-4c87-8753-4c0efe92a9c9 tempest-ServersAdmin275Test-1032667631 tempest-ServersAdmin275Test-1032667631-project-member] Lock "c71693e9-aeaa-4f12-b5cf-a179e558505d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.820s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.106234] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 666.106809] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6534f91f-fb94-4893-ba6e-a74f2259ba57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.126600] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 666.126600] env[68638]: value = "task-2833227" [ 666.126600] env[68638]: _type = "Task" [ 666.126600] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.148985] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833227, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.220701] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.221137] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquired lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.221344] env[68638]: DEBUG nova.network.neutron [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.221530] env[68638]: DEBUG nova.objects.instance [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'info_cache' on Instance uuid 1946baab-bb48-4138-8db6-1f530e432c3d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 666.310067] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833225, 'name': CreateVM_Task, 'duration_secs': 0.59163} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.310262] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 666.310982] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.311162] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.311473] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 666.311733] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71dcfeb1-6084-4f42-a0b9-1844589e2978 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.320605] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 666.320605] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ecc38f-de61-9b0b-62f7-20934b6ab109" [ 666.320605] env[68638]: _type = "Task" [ 666.320605] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.330395] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ecc38f-de61-9b0b-62f7-20934b6ab109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.365809] env[68638]: DEBUG nova.network.neutron [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Updated VIF entry in instance network info cache for port c66c96e0-bc74-4336-b48c-3005a0e8be96. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 666.365809] env[68638]: DEBUG nova.network.neutron [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Updating instance_info_cache with network_info: [{"id": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "address": "fa:16:3e:77:da:5e", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc66c96e0-bc", "ovs_interfaceid": "c66c96e0-bc74-4336-b48c-3005a0e8be96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.394863] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.395267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock 
"refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.395267] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.638670] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833227, 'name': PowerOffVM_Task, 'duration_secs': 0.272421} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.641587] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.641789] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.642296] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b990d78c-f085-4775-8021-1a5555094ba3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.671030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27128e0-0add-4f13-85b7-a4dab33b6503 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.679135] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7ec5bb-7fa6-4a91-b246-8e0989ea9554 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.714610] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89e9361-b1f9-4e0e-b47c-cf3308612d38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.716853] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.717060] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.717242] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Deleting the datastore file [datastore1] a5dedd3e-a544-4005-bc9b-0735267d6753 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.718465] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff561483-2689-4795-bcb0-c386d7f5a6b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.725631] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0308bbe-af1e-4d12-8d97-3ba072af2e1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.730964] env[68638]: DEBUG nova.objects.base [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Object Instance<1946baab-bb48-4138-8db6-1f530e432c3d> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 666.732970] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for the task: (returnval){ [ 666.732970] env[68638]: value = "task-2833229" [ 666.732970] env[68638]: _type = "Task" [ 666.732970] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.746667] env[68638]: DEBUG nova.compute.provider_tree [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.753797] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.831808] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ecc38f-de61-9b0b-62f7-20934b6ab109, 'name': SearchDatastore_Task, 'duration_secs': 0.012574} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.831808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.833178] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.833178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.833178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.833178] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.833178] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d676665-4706-4184-8d61-036ef3da2967 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.842289] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.842515] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 666.843354] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e5a8ec3-f14e-4fe2-9a6d-83ee67921f9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.848830] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 666.848830] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f8911a-f297-5455-62cb-f83c7fa8ef48" [ 666.848830] env[68638]: _type = "Task" [ 666.848830] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.856083] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f8911a-f297-5455-62cb-f83c7fa8ef48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.867786] env[68638]: DEBUG oslo_concurrency.lockutils [req-92549503-8a78-4635-98b1-a4c577215816 req-2ceb9dbb-05ca-43f8-9e4f-c4a33788fdbb service nova] Releasing lock "refresh_cache-4eb4360a-46a8-440b-b300-4724c3497ff2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.957356] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.151523] env[68638]: DEBUG nova.network.neutron [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Updating instance_info_cache with network_info: [{"id": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "address": "fa:16:3e:b1:29:81", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fa307a-55", "ovs_interfaceid": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.249173] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.250764] env[68638]: DEBUG nova.scheduler.client.report [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.362953] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f8911a-f297-5455-62cb-f83c7fa8ef48, 'name': SearchDatastore_Task, 'duration_secs': 0.041726} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.365943] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426f4cdc-7ad2-4f7a-a1fc-f4907a5a000f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.371568] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 667.371568] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b7ff-9871-614c-87fb-ee0c3d79e3ef" [ 667.371568] env[68638]: _type = "Task" [ 667.371568] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.379742] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5262b7ff-9871-614c-87fb-ee0c3d79e3ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.507604] env[68638]: DEBUG nova.network.neutron [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updating instance_info_cache with network_info: [{"id": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "address": "fa:16:3e:4e:e1:ee", "network": {"id": "2dc0e495-5a5a-47e0-8c1c-61e000194cc0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-658812124-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "373459ee626847e9886e5ff353729280", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc94367eb-4d", "ovs_interfaceid": "c94367eb-4dac-4137-92b7-00d32ad0be7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.641983] env[68638]: DEBUG nova.compute.manager [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Received event network-vif-plugged-a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 667.641983] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Acquiring lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.642123] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.642545] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.642545] env[68638]: DEBUG nova.compute.manager [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] No waiting events found dispatching network-vif-plugged-a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 667.642814] env[68638]: WARNING nova.compute.manager [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Received unexpected event network-vif-plugged-a9fa307a-55b9-4398-b9a3-75870a0519ca for instance with vm_state building and task_state spawning. [ 667.642814] env[68638]: DEBUG nova.compute.manager [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Received event network-changed-a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 667.642907] env[68638]: DEBUG nova.compute.manager [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Refreshing instance network info cache due to event network-changed-a9fa307a-55b9-4398-b9a3-75870a0519ca. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 667.643196] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Acquiring lock "refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.657496] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.657496] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance network_info: |[{"id": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "address": "fa:16:3e:b1:29:81", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fa307a-55", "ovs_interfaceid": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.657666] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Acquired lock "refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.657666] env[68638]: DEBUG nova.network.neutron [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Refreshing network info cache for port a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.657666] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:29:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9fa307a-55b9-4398-b9a3-75870a0519ca', 'vif_model': 
'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.666950] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating folder: Project (ded98d5a15c54e01b752c52b88549b3e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.668756] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49f3b8d4-6cdc-4b62-943e-e364c11a573f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.682019] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created folder: Project (ded98d5a15c54e01b752c52b88549b3e) in parent group-v569734. [ 667.682019] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating folder: Instances. Parent ref: group-v569818. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.682019] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-977eba58-5428-4834-9bdd-4f743bf9743f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.690979] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created folder: Instances in parent group-v569818. [ 667.690979] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.691124] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.691433] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77c6386b-1083-4cd6-ba31-f718b8ea4338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.711008] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.711008] env[68638]: value = "task-2833232" [ 667.711008] env[68638]: _type = "Task" [ 667.711008] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.719130] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833232, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.746354] env[68638]: DEBUG oslo_vmware.api [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Task: {'id': task-2833229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.547377} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.746614] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 667.746800] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 667.746984] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.747170] env[68638]: INFO nova.compute.manager [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 1.67 seconds to destroy the instance on the hypervisor. [ 667.747404] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.747591] env[68638]: DEBUG nova.compute.manager [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 667.748173] env[68638]: DEBUG nova.network.neutron [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.882225] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5262b7ff-9871-614c-87fb-ee0c3d79e3ef, 'name': SearchDatastore_Task, 'duration_secs': 0.027483} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.882492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.882859] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4eb4360a-46a8-440b-b300-4724c3497ff2/4eb4360a-46a8-440b-b300-4724c3497ff2.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.883216] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50b0bc39-0f2f-411d-b3d9-f82f72c01257 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.889313] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 667.889313] env[68638]: value = "task-2833234" [ 667.889313] env[68638]: _type = "Task" [ 667.889313] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.898046] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833234, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.010304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Releasing lock "refresh_cache-1946baab-bb48-4138-8db6-1f530e432c3d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.221084] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833232, 'name': CreateVM_Task, 'duration_secs': 0.33502} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.221257] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.221906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.222114] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.222430] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.222678] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7c3daee-a5da-45fc-a670-bcc2961b75e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.227210] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 668.227210] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52be6aba-c41e-9dcf-887e-0fa4d3fb6293" [ 668.227210] env[68638]: _type = "Task" [ 668.227210] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.235688] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52be6aba-c41e-9dcf-887e-0fa4d3fb6293, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.262118] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.695s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.265012] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.548s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.266472] env[68638]: INFO nova.compute.claims [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.401860] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833234, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.412902] env[68638]: DEBUG nova.network.neutron [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Updated VIF entry in instance network info cache for port a9fa307a-55b9-4398-b9a3-75870a0519ca. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 668.413421] env[68638]: DEBUG nova.network.neutron [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Updating instance_info_cache with network_info: [{"id": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "address": "fa:16:3e:b1:29:81", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fa307a-55", "ovs_interfaceid": "a9fa307a-55b9-4398-b9a3-75870a0519ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.545253] env[68638]: DEBUG nova.network.neutron [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.738517] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52be6aba-c41e-9dcf-887e-0fa4d3fb6293, 'name': SearchDatastore_Task, 'duration_secs': 0.013187} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.738903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.739184] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.739428] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.739598] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.739788] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 668.740155] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0650a11a-6a70-4f42-8f89-8b82873ab5e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.764863] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.765225] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.766118] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf5a0479-f0d2-441f-bf12-4c142adbf281 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.776812] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 668.776812] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52065427-b715-de7b-fdcb-1e23a3024529" [ 668.776812] env[68638]: _type = "Task" [ 668.776812] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.788206] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52065427-b715-de7b-fdcb-1e23a3024529, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.832344] env[68638]: INFO nova.scheduler.client.report [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleted allocation for migration a046c66a-9a56-4cc5-8a1c-ed2bc6ccbe27 [ 668.900240] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833234, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747822} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.900426] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4eb4360a-46a8-440b-b300-4724c3497ff2/4eb4360a-46a8-440b-b300-4724c3497ff2.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.900639] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.901224] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ed9896d-011f-4612-98a9-c9f81cdbe605 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.907836] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 668.907836] env[68638]: value = "task-2833235" [ 668.907836] env[68638]: _type = "Task" [ 668.907836] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.916340] env[68638]: DEBUG oslo_concurrency.lockutils [req-0ac4b75e-67d9-4b49-9ead-0f5116cdc02c req-ef8d1563-fb83-4b02-b7de-b9e94a4605d8 service nova] Releasing lock "refresh_cache-4edaaa5d-535a-4c63-ab44-724548a0f3eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.916822] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833235, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.017032] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.017164] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93f60e34-7af5-493e-8ee9-4401f9386dfc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.023250] env[68638]: DEBUG oslo_vmware.api [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 669.023250] env[68638]: value = "task-2833236" [ 669.023250] env[68638]: _type = "Task" [ 669.023250] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.031565] env[68638]: DEBUG oslo_vmware.api [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833236, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.047320] env[68638]: INFO nova.compute.manager [-] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Took 1.30 seconds to deallocate network for instance. [ 669.142008] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.142008] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.290113] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52065427-b715-de7b-fdcb-1e23a3024529, 'name': SearchDatastore_Task, 'duration_secs': 0.05568} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.290966] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd0ca4a5-6f4d-4ed4-adbe-06acc7952b5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.296401] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 669.296401] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523efce8-e5aa-f558-ab21-4a1a1956f560" [ 669.296401] env[68638]: _type = "Task" [ 669.296401] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.308384] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523efce8-e5aa-f558-ab21-4a1a1956f560, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.339405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-33873217-c3a3-4d59-8e85-be969a0ae34e tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 38.204s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.417718] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833235, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.420437] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.420718] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b5c3d1-e79e-4bd9-808c-5ec450c633e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.443985] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 4eb4360a-46a8-440b-b300-4724c3497ff2/4eb4360a-46a8-440b-b300-4724c3497ff2.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.446841] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08f2e336-25c7-470a-947e-bf1859f10b8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.471235] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 669.471235] env[68638]: value = "task-2833237" [ 669.471235] env[68638]: _type = "Task" [ 669.471235] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.481602] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.533872] env[68638]: DEBUG oslo_vmware.api [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833236, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.555150] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.715163] env[68638]: DEBUG nova.compute.manager [req-a8dba95d-f6e0-4a12-a118-43ec69aacbb7 req-6a25e6e9-6b97-47c4-876a-98bf4e794114 service nova] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Received event network-vif-deleted-e545c157-e03b-41b1-a90a-4519cddbdfaa {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 669.809266] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523efce8-e5aa-f558-ab21-4a1a1956f560, 'name': SearchDatastore_Task, 'duration_secs': 0.028358} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.813307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.813307] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.813307] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0ab81b5-454e-4b6c-bdb5-46eb24a776e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.822509] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 669.822509] env[68638]: value = "task-2833238" [ 669.822509] env[68638]: _type = "Task" [ 669.822509] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.837441] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833238, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.880062] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55a4943-2255-4284-95a4-d744ad0a724f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.889418] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3c36b0-597e-4887-8cb0-3de7bb87ff80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.923953] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc3398f-b29f-4e43-bf32-e2d6b6c8a13b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.934020] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4f5472-dc41-449b-8254-8dfe8fc9ba0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.948449] env[68638]: DEBUG nova.compute.provider_tree [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.983560] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833237, 'name': ReconfigVM_Task, 'duration_secs': 0.327642} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.983611] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 4eb4360a-46a8-440b-b300-4724c3497ff2/4eb4360a-46a8-440b-b300-4724c3497ff2.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.984265] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2233d34c-90a3-4afe-b77e-9962703dc900 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.993010] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 669.993010] env[68638]: value = "task-2833240" [ 669.993010] env[68638]: _type = "Task" [ 669.993010] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.002507] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833240, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.035785] env[68638]: DEBUG oslo_vmware.api [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833236, 'name': PowerOnVM_Task, 'duration_secs': 0.654551} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.036179] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.036446] env[68638]: DEBUG nova.compute.manager [None req-0109cb43-3b01-45fc-9a40-5467e3aaaada tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.037442] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dc89c0-a6c7-4502-8f05-013a95704fae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.337699] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833238, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.451652] env[68638]: DEBUG nova.scheduler.client.report [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.503281] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833240, 'name': Rename_Task, 'duration_secs': 0.171283} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.503623] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.503888] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edc6e62b-bdc1-4d21-b228-77e919388e53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.511786] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 670.511786] env[68638]: value = "task-2833241" [ 670.511786] env[68638]: _type = "Task" [ 670.511786] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.520394] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.836262] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833238, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544447} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.836541] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 670.836750] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 670.836993] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f8be360-ab13-47d6-ae97-2e2c107b6cb0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.844938] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 670.844938] env[68638]: value = "task-2833242" [ 670.844938] env[68638]: _type = "Task" [ 670.844938] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.853857] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.958247] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.958745] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.961510] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.924s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.962225] env[68638]: DEBUG nova.objects.instance [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lazy-loading 'resources' on Instance uuid 421c377f-0b7a-457d-b5dd-50281c65122a {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 671.011527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "a09c4492-34fd-4010-b547-bfb5b61f252d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.011759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.023552] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833241, 'name': PowerOnVM_Task} progress is 88%. 
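Annotation: the "Acquiring lock ... by ..." / "acquired ... :: waited N.NNNs" / '"released" ... :: held N.NNNs' triplets above are emitted by oslo.concurrency's lock wrapper (lockutils.py inner in the logged paths): waited is how long the caller blocked on the named lock and held is how long the critical section ran, which is why update_usage above waited 34.924s while other resource-tracker calls held the same "compute_resources" lock. A hedged usage sketch; lockutils.synchronized is the real oslo.concurrency decorator, the function body is illustrative:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_example(instance_uuid, vcpus, memory_mb):
    # Only one caller per lock name runs this body at a time; the log's
    # "waited" is time spent blocked before entering, "held" is the time
    # the body took before the lock was released.
    print('claiming %d vCPU / %d MB for %s' % (vcpus, memory_mb, instance_uuid))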
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.358617] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071551} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.358917] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.360941] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c7146a-fd96-465b-9020-25b6f54e6f5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.385858] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.386219] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f08c2d4-11eb-413a-a1c0-a7c8ec0ad936 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.407938] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 671.407938] env[68638]: value = "task-2833243" [ 671.407938] env[68638]: _type = "Task" [ 671.407938] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.416826] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833243, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.466061] env[68638]: DEBUG nova.compute.utils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 671.471798] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Allocating IP information in the background. 
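Annotation: the "Extending root virtual disk to 1048576" entry above is the flavor's 1 GB root disk expressed in KiB, which is the unit the vCenter extend-disk task works in. A quick check of that conversion (plain Python, not nova code):

root_gb = 1                          # m1.nano in this run has root_gb=1 (see flavor dump below)
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576    # the value logged by _extend_virtual_disk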
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 671.471798] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 671.526275] env[68638]: DEBUG oslo_vmware.api [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833241, 'name': PowerOnVM_Task, 'duration_secs': 0.767019} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.526548] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.526750] env[68638]: INFO nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Took 8.85 seconds to spawn the instance on the hypervisor. [ 671.526925] env[68638]: DEBUG nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.530014] env[68638]: DEBUG nova.policy [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e66d8cfbe6c41bc90baaf1e7eb23a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ded98d5a15c54e01b752c52b88549b3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.532310] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efa1bf1-6289-4261-8074-6519aa196c3f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.915993] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Successfully created port: 3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.926021] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833243, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.973040] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 672.055785] env[68638]: INFO nova.compute.manager [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Took 40.60 seconds to build instance. [ 672.072042] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344ddf57-924d-49fe-8304-771bdead5e1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.083886] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74f3ff5-5da9-437c-9197-b0d6ddaad5ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.120053] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89ca736-f7f7-439b-9e69-54f88c1b8177 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.126071] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca01d31-f4b3-440f-a5ba-3de32d45ab9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.140756] env[68638]: DEBUG nova.compute.provider_tree [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.419106] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833243, 'name': ReconfigVM_Task, 'duration_secs': 0.539798} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.419601] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.421024] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9303fcce-83a3-4164-a00b-284adf08c31d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.427647] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 672.427647] env[68638]: value = "task-2833245" [ 672.427647] env[68638]: _type = "Task" [ 672.427647] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.436219] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833245, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.562357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75d664ce-570c-447e-aca1-23086e5043e3 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.611s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.644399] env[68638]: DEBUG nova.scheduler.client.report [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.938454] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833245, 'name': Rename_Task, 'duration_secs': 0.179754} completed successfully. 
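Annotation: the inventory dict repeated in the "Inventory has not changed for provider a03d7c1f-..." entries is what the resource tracker reports to Placement, where usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check against the logged values (plain Python, not scheduler code):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0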
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.938731] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.938985] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8debb45e-78e8-481a-85b5-1546dc4204a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.946121] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 672.946121] env[68638]: value = "task-2833246" [ 672.946121] env[68638]: _type = "Task" [ 672.946121] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.956868] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.983711] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 673.011139] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 673.011414] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 673.011575] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 673.011754] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 673.011899] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 673.012090] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 673.012320] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 673.012480] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 673.012645] env[68638]: DEBUG nova.virt.hardware [None 
req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 673.012804] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 673.012975] env[68638]: DEBUG nova.virt.hardware [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 673.013829] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d24e8f0-70e9-4b57-9a66-146308931617 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.022449] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b89f57-0cd6-4bae-85d4-bcd409eb1ac5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.066634] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.149634] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.152524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.765s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.152524] env[68638]: DEBUG nova.objects.instance [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lazy-loading 'resources' on Instance uuid 53571ad6-1fdb-4651-8b4d-24f35ffc815a {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 673.174504] env[68638]: INFO nova.scheduler.client.report [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Deleted allocations for instance 421c377f-0b7a-457d-b5dd-50281c65122a [ 673.241781] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] 
Acquiring lock "4eb4360a-46a8-440b-b300-4724c3497ff2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.242279] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.242518] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.242713] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.242883] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.245432] env[68638]: INFO nova.compute.manager [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Terminating instance [ 673.384420] env[68638]: DEBUG nova.compute.manager [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Received event network-vif-plugged-3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 673.384613] env[68638]: DEBUG oslo_concurrency.lockutils [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] Acquiring lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.384829] env[68638]: DEBUG oslo_concurrency.lockutils [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.384994] env[68638]: DEBUG oslo_concurrency.lockutils [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.386079] env[68638]: DEBUG nova.compute.manager [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] No waiting events found dispatching network-vif-plugged-3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 673.386336] env[68638]: WARNING nova.compute.manager [req-16d69c6c-4604-41d3-adcc-c6af82c727b4 req-a9d967fd-7b21-4906-8f90-be320c1e706c service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Received unexpected event network-vif-plugged-3a6c2c2c-b195-47d0-a907-17dac5df15df for instance with vm_state building and task_state spawning. [ 673.458269] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833246, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.480706] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Successfully updated port: 3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 673.589324] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.593910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.594182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.594414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.594594] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.594759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.596732] env[68638]: INFO nova.compute.manager [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Terminating instance [ 673.685816] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2b001eec-7bb3-4429-87d1-be6b4179df9f tempest-ServerDiagnosticsTest-674141276 tempest-ServerDiagnosticsTest-674141276-project-member] Lock "421c377f-0b7a-457d-b5dd-50281c65122a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.726s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.748817] env[68638]: DEBUG nova.compute.manager [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 673.750068] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 673.750248] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c502443d-08b0-45de-a6d7-faecd77abb24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.759556] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 673.760186] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b72303e-9200-4e91-9df3-0a29b3ac9bb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.767689] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 673.767689] env[68638]: value = "task-2833247" [ 673.767689] env[68638]: _type = "Task" [ 673.767689] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.780008] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.957908] env[68638]: DEBUG oslo_vmware.api [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833246, 'name': PowerOnVM_Task, 'duration_secs': 0.569162} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.960459] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 673.960669] env[68638]: INFO nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Took 8.59 seconds to spawn the instance on the hypervisor. 
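Annotation: taken together, the entries above trace the spawn path for instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb: copy the cached image disk, extend it to the flavor's root size, reconfigure the VM to attach the disk, rename the VM, then power it on, each step driven by a vCenter task polled to completion. A hedged outline of that ordering with stub callables (illustrative, not the nova vmwareapi implementation):

class FakeVCenterOps(object):
    # Stubs that only name the vCenter task each spawn step issues.
    def copy_virtual_disk(self, src, dst): print('CopyVirtualDisk_Task', src, '->', dst)
    def extend_virtual_disk(self, path, kb): print('ExtendVirtualDisk_Task', path, kb)
    def attach_disk(self, vm, path): print('ReconfigVM_Task (attach disk)', vm, path)
    def rename(self, vm, new_name): print('Rename_Task', vm, '->', new_name)
    def power_on(self, vm): print('PowerOnVM_Task', vm)

def spawn(ops, vm, image_vmdk, root_vmdk, root_kb):
    ops.copy_virtual_disk(image_vmdk, root_vmdk)   # from the image cache
    ops.extend_virtual_disk(root_vmdk, root_kb)    # grow to the flavor root size
    ops.attach_disk(vm, root_vmdk)
    ops.rename(vm, vm)                             # Rename_Task as in the log
    ops.power_on(vm)

spawn(FakeVCenterOps(),
      '4edaaa5d-535a-4c63-ab44-724548a0f3eb',
      '[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk',
      '[datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk',
      1048576)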
[ 673.960847] env[68638]: DEBUG nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 673.961812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81579305-9c0e-49bb-81ab-3834e215ddbb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.983893] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.984063] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.984219] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 674.102063] env[68638]: DEBUG nova.compute.manager [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.102646] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.103273] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720f4be3-2f56-43b3-8c2c-606c61a43101 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.111608] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.111866] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afebd5db-920d-4fab-8656-56539437a320 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.122314] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 674.122314] env[68638]: value = "task-2833248" [ 674.122314] env[68638]: _type = "Task" [ 674.122314] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.134575] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833248, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.167424] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49df3a8d-d867-4f9f-89e3-1790b561038b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.175290] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356e55dc-d38d-4199-951e-1b6941b08b91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.208212] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde9a74a-d40f-41b0-b471-9afdf192b0b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.217547] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e1a55c-40ce-4055-85d0-25f988956224 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.234019] env[68638]: DEBUG nova.compute.provider_tree [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.279649] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833247, 'name': PowerOffVM_Task, 'duration_secs': 0.183592} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.279961] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.280681] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.280681] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1e77544-bffa-4050-9517-660a329edbac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.482160] env[68638]: INFO nova.compute.manager [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Took 42.77 seconds to build instance. [ 674.519687] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 674.635807] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833248, 'name': PowerOffVM_Task, 'duration_secs': 0.224928} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.637128] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.637397] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.637689] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 674.637813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 674.637944] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Deleting the datastore file [datastore2] 4eb4360a-46a8-440b-b300-4724c3497ff2 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.638190] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d01cd9d1-a83c-460f-bcdf-57ce25765ec2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.639739] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f84b0696-ff73-4f9e-92e2-37e372664308 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.648233] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for the task: (returnval){ [ 674.648233] env[68638]: value = "task-2833251" [ 674.648233] env[68638]: _type = "Task" [ 674.648233] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.657479] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833251, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.672316] env[68638]: DEBUG nova.network.neutron [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Updating instance_info_cache with network_info: [{"id": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "address": "fa:16:3e:1a:1c:ac", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6c2c2c-b1", "ovs_interfaceid": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.737581] env[68638]: DEBUG nova.scheduler.client.report [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 674.795239] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 674.796033] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
674.796033] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleting the datastore file [datastore1] f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.796033] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f06b83cd-ea1b-415e-8d9c-588066fb1f4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.803657] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 674.803657] env[68638]: value = "task-2833252" [ 674.803657] env[68638]: _type = "Task" [ 674.803657] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.812900] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.985552] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7aeefa3d-764c-46e8-895d-5a005852bd30 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.144s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.160013] env[68638]: DEBUG oslo_vmware.api [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Task: {'id': task-2833251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178386} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.160297] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.160488] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.160949] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.160949] env[68638]: INFO nova.compute.manager [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Took 1.41 seconds to destroy the instance on the hypervisor. [ 675.161146] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.161365] env[68638]: DEBUG nova.compute.manager [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.161485] env[68638]: DEBUG nova.network.neutron [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.175149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.175319] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Instance network_info: |[{"id": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "address": "fa:16:3e:1a:1c:ac", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6c2c2c-b1", "ovs_interfaceid": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 675.175839] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:1c:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a6c2c2c-b195-47d0-a907-17dac5df15df', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 675.183712] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.183939] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 675.184533] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09534d43-9de4-4c34-adf0-c114c1108ef0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.209472] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 675.209472] env[68638]: value = "task-2833253" [ 675.209472] env[68638]: _type = "Task" [ 675.209472] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.219376] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833253, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.243616] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.245914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.886s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.248642] env[68638]: INFO nova.compute.claims [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.279182] env[68638]: INFO nova.scheduler.client.report [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Deleted allocations for instance 53571ad6-1fdb-4651-8b4d-24f35ffc815a [ 675.318674] env[68638]: DEBUG oslo_vmware.api [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156398} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.320436] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.320436] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.320579] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.320721] env[68638]: INFO nova.compute.manager [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 675.320924] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.321416] env[68638]: DEBUG nova.compute.manager [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.321519] env[68638]: DEBUG nova.network.neutron [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.488949] env[68638]: DEBUG nova.compute.manager [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Received event network-changed-3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 675.489441] env[68638]: DEBUG nova.compute.manager [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Refreshing instance network info cache due to event network-changed-3a6c2c2c-b195-47d0-a907-17dac5df15df. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 675.489779] env[68638]: DEBUG oslo_concurrency.lockutils [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] Acquiring lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.490435] env[68638]: DEBUG oslo_concurrency.lockutils [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] Acquired lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.492467] env[68638]: DEBUG nova.network.neutron [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Refreshing network info cache for port 3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 675.492467] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.721806] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833253, 'name': CreateVM_Task, 'duration_secs': 0.40281} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.721995] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.722775] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.722933] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.723300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 675.723557] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8da83b0-6423-4817-8893-21296453a209 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.729512] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 
tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 675.729512] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5240d20c-f9c4-1b61-8904-7356e718fa5f" [ 675.729512] env[68638]: _type = "Task" [ 675.729512] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.738742] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5240d20c-f9c4-1b61-8904-7356e718fa5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.783149] env[68638]: DEBUG nova.compute.manager [req-bac9c02e-9f3c-48b3-a15c-e8304cc17129 req-db0ff8d4-c73a-4f98-9a49-8f9e8ec07bed service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Received event network-vif-deleted-1a99ea84-dd8a-40bf-bfb2-405fdd3a9919 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 675.783368] env[68638]: INFO nova.compute.manager [req-bac9c02e-9f3c-48b3-a15c-e8304cc17129 req-db0ff8d4-c73a-4f98-9a49-8f9e8ec07bed service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Neutron deleted interface 1a99ea84-dd8a-40bf-bfb2-405fdd3a9919; detaching it from the instance and deleting it from the info cache [ 675.783546] env[68638]: DEBUG nova.network.neutron [req-bac9c02e-9f3c-48b3-a15c-e8304cc17129 req-db0ff8d4-c73a-4f98-9a49-8f9e8ec07bed service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.788022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad8957e9-859a-41a4-bbd9-c9e360294b1b tempest-ServerExternalEventsTest-2104329646 tempest-ServerExternalEventsTest-2104329646-project-member] Lock "53571ad6-1fdb-4651-8b4d-24f35ffc815a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.948s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.959764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.960067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.965867] env[68638]: DEBUG nova.network.neutron [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.022765] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.145648] env[68638]: DEBUG nova.network.neutron [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.240863] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5240d20c-f9c4-1b61-8904-7356e718fa5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010949} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.241379] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.241455] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 676.242267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.242441] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.242654] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 676.242924] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-503c7227-f34d-4f87-8ea5-bbbf7f10a1e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.253933] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 
tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 676.254228] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 676.254970] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d4dbe45-0857-4a1e-b64f-bcfb9399b994 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.266823] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 676.266823] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e932c3-20a0-eccc-9f9c-8bfd6575c7e2" [ 676.266823] env[68638]: _type = "Task" [ 676.266823] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.281044] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e932c3-20a0-eccc-9f9c-8bfd6575c7e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.285801] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41e2ce9d-56b2-42c9-b0b8-e4685d1cd352 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.301362] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9da41e-b17f-4658-81d7-82a033175ce3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.345555] env[68638]: DEBUG nova.compute.manager [req-bac9c02e-9f3c-48b3-a15c-e8304cc17129 req-db0ff8d4-c73a-4f98-9a49-8f9e8ec07bed service nova] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Detach interface failed, port_id=1a99ea84-dd8a-40bf-bfb2-405fdd3a9919, reason: Instance f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 676.386788] env[68638]: DEBUG nova.network.neutron [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Updated VIF entry in instance network info cache for port 3a6c2c2c-b195-47d0-a907-17dac5df15df. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 676.387319] env[68638]: DEBUG nova.network.neutron [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Updating instance_info_cache with network_info: [{"id": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "address": "fa:16:3e:1a:1c:ac", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a6c2c2c-b1", "ovs_interfaceid": "3a6c2c2c-b195-47d0-a907-17dac5df15df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.468774] env[68638]: INFO nova.compute.manager [-] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Took 1.31 seconds to deallocate network for instance. [ 676.648443] env[68638]: INFO nova.compute.manager [-] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Took 1.33 seconds to deallocate network for instance. [ 676.778538] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e932c3-20a0-eccc-9f9c-8bfd6575c7e2, 'name': SearchDatastore_Task, 'duration_secs': 0.01062} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.779414] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8d4a5c3-0406-41d9-982c-ad2fa60ddc2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.790207] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 676.790207] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eb3e16-68ad-cb1a-f4c2-0a9f955142f5" [ 676.790207] env[68638]: _type = "Task" [ 676.790207] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.797622] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eb3e16-68ad-cb1a-f4c2-0a9f955142f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.890560] env[68638]: DEBUG oslo_concurrency.lockutils [req-7f576282-a56f-4f9c-b838-aa9d44025165 req-3e69b89d-7dd6-4a4f-a319-9c85b3e1f952 service nova] Releasing lock "refresh_cache-9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.893469] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd393fac-0e8b-484d-a8ec-aa699cb09768 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.902212] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1e0d7a-532e-408a-a43d-4338d3396093 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.934908] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f914d40-103a-405b-884a-2b6b87d98cd9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.943532] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe24b68d-3e2a-4578-9f77-4900e6f71bf1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.958186] env[68638]: DEBUG nova.compute.provider_tree [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.975853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.159603] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.305488] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eb3e16-68ad-cb1a-f4c2-0a9f955142f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010418} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.305793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.306154] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e/9c0d1c2d-88ea-40be-aef1-43b37b4dca3e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 677.306446] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4007c683-40b9-4d20-8624-a77baaf3a28b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.314686] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 677.314686] env[68638]: value = "task-2833254" [ 677.314686] env[68638]: _type = "Task" [ 677.314686] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.324007] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833254, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.461548] env[68638]: DEBUG nova.scheduler.client.report [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.569321] env[68638]: DEBUG nova.compute.manager [req-3498223f-6905-4e1d-afb5-6b18483ee820 req-38ca7b84-11df-44bd-9e87-b28cb2d0936f service nova] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Received event network-vif-deleted-c66c96e0-bc74-4336-b48c-3005a0e8be96 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 677.827686] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833254, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485965} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.828013] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e/9c0d1c2d-88ea-40be-aef1-43b37b4dca3e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.830023] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.830023] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2369939-291a-4eef-bcdd-359aa56de543 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.836283] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 677.836283] env[68638]: value = "task-2833255" [ 677.836283] env[68638]: _type = "Task" [ 677.836283] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.846284] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833255, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.967447] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.967987] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 677.970584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.151s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.972657] env[68638]: INFO nova.compute.claims [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.348085] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071787} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.348536] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.349707] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ae0eae-8d44-4714-96a6-de868f6ed7b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.378529] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e/9c0d1c2d-88ea-40be-aef1-43b37b4dca3e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.378991] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1976eada-2714-48da-b820-006c909aae51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.402417] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 678.402417] env[68638]: value = "task-2833256" [ 678.402417] env[68638]: _type = "Task" [ 678.402417] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.413743] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833256, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.480173] env[68638]: DEBUG nova.compute.utils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.482370] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.482547] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 678.535450] env[68638]: DEBUG nova.policy [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf59b87f634745d49969858624a7f9b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a20bc501951647abbd0c0d8e075312e2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.916539] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833256, 'name': ReconfigVM_Task, 'duration_secs': 0.299209} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.916826] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e/9c0d1c2d-88ea-40be-aef1-43b37b4dca3e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.917563] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c75c1591-0f84-40d5-82b8-2d642245fc3b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.925714] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 678.925714] env[68638]: value = "task-2833257" [ 678.925714] env[68638]: _type = "Task" [ 678.925714] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.936897] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833257, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.986728] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.209548] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully created port: 24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.463020] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833257, 'name': Rename_Task, 'duration_secs': 0.153181} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.463020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.463020] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff70dbcf-c6d0-42a0-9096-5b3f1b39eb22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.475760] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 679.475760] env[68638]: value = "task-2833258" [ 679.475760] env[68638]: _type = "Task" [ 679.475760] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.490196] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833258, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.647689] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd62b8c-d1d9-4aa1-b452-ed6e6a957d15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.656950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541f4c50-93e4-465d-9f66-62da16da7db1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.701971] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952a24c7-f881-4197-abe4-8c968c9d9373 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.711200] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d7ed44-1c8f-425c-b40a-1a308ef066af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.730511] env[68638]: DEBUG nova.compute.provider_tree [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.815022] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully created port: 6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.991382] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833258, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.010166] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.044024] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.044024] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.044024] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.044024] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.044872] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.044872] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.049022] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 680.049022] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.049022] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f 
tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.049022] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.049022] env[68638]: DEBUG nova.virt.hardware [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.049286] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099675b4-bf7a-4d3b-b68f-7bb80d373ae4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.064282] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8c6d02-242b-4e85-b36d-2341e1c57966 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.154202] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully created port: 286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.233503] env[68638]: DEBUG nova.scheduler.client.report [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 680.488580] env[68638]: DEBUG oslo_vmware.api [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833258, 'name': PowerOnVM_Task, 'duration_secs': 0.528007} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.489320] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 680.489985] env[68638]: INFO nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Took 7.51 seconds to spawn the instance on the hypervisor. [ 680.490448] env[68638]: DEBUG nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 680.492173] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef156863-bbaa-429d-a02e-d8555f7235b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.722535] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "da306fdd-a5b4-4275-a482-f77cc008d780" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.722767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.738567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.739110] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.741655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 37.186s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.741835] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.743918] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 680.744237] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.978s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.745601] env[68638]: INFO nova.compute.claims [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.748838] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a061747-28f9-42ed-991d-72c7a75c927f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.758551] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e71e3c-c4e1-4d53-b559-ccd737ff3b65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.776472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35619d6-7717-4598-99d6-3f31f057e3a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.786393] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb07eb1d-e751-4559-8b89-d668b77d8865 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.820223] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179756MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 680.820417] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.014023] env[68638]: INFO nova.compute.manager [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Took 46.34 seconds to build instance. [ 681.251084] env[68638]: DEBUG nova.compute.utils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 681.252036] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 681.252254] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 681.309311] env[68638]: DEBUG nova.policy [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.514897] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b43a8c04-b66d-49cd-bfc5-972f5628889f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.691s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.749032] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Successfully created port: 822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 681.758181] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 682.022013] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.063189] env[68638]: DEBUG nova.compute.manager [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-plugged-24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 682.063313] env[68638]: DEBUG oslo_concurrency.lockutils [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.063525] env[68638]: DEBUG oslo_concurrency.lockutils [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.064090] env[68638]: DEBUG oslo_concurrency.lockutils [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.064090] env[68638]: DEBUG nova.compute.manager [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] No waiting events found dispatching network-vif-plugged-24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 682.064090] env[68638]: WARNING nova.compute.manager [req-81a2f23f-be2f-43d3-876a-6c64f47f3da1 req-b28ecc68-6b65-4a22-9955-587a0244f427 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received unexpected event network-vif-plugged-24a47fde-b177-4dfe-af1b-12b1396cf1a4 for instance with vm_state building and task_state spawning. 
[ 682.079399] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully updated port: 24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.461816] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d45e362-dfa9-40f3-bcb7-f324824dae1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.470894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d109c69f-2b4d-4a3b-ba04-65cfc0afb64d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.504410] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811d10f2-3a57-4d58-95b4-feacb36333f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.514575] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a5f146-5590-484c-a8df-1b94f4897e49 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.532703] env[68638]: DEBUG nova.compute.provider_tree [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.550115] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.773337] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.815019] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.815019] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.815019] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.815019] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.815298] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.815298] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.815298] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.815298] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.815298] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.815436] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.815436] env[68638]: DEBUG nova.virt.hardware [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.815436] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1f7ea2-7f10-4a2d-b1c5-6a9e31b18b20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.824257] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab430e7e-31a0-43e6-9e77-81a55f0e9878 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.039284] env[68638]: DEBUG nova.scheduler.client.report [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.131795] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.132050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.380440] env[68638]: DEBUG nova.compute.manager [req-1c3460bd-8cc1-4d49-800c-8f847d714819 req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Received event network-vif-plugged-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 683.380647] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c3460bd-8cc1-4d49-800c-8f847d714819 
req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] Acquiring lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.381801] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c3460bd-8cc1-4d49-800c-8f847d714819 req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.382010] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c3460bd-8cc1-4d49-800c-8f847d714819 req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.382211] env[68638]: DEBUG nova.compute.manager [req-1c3460bd-8cc1-4d49-800c-8f847d714819 req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] No waiting events found dispatching network-vif-plugged-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.382393] env[68638]: WARNING nova.compute.manager [req-1c3460bd-8cc1-4d49-800c-8f847d714819 req-6165ab1c-1b3e-440a-9af0-9de22beb2530 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Received unexpected event network-vif-plugged-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 for instance with vm_state building and task_state spawning. [ 683.451323] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Successfully updated port: 822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.544664] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.800s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.545288] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 683.547872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.621s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.548118] env[68638]: DEBUG nova.objects.instance [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lazy-loading 'resources' on Instance uuid 32efc578-2cf9-4b61-bbaa-aa7031a04e33 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 683.954838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.954838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.954838] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 684.051944] env[68638]: DEBUG nova.compute.utils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.053443] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.053627] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.136536] env[68638]: DEBUG nova.policy [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f561b384c19247538a75ba71c979847e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4ebea664959481685e3adc85304e174', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.145821] env[68638]: DEBUG nova.compute.manager [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-changed-24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 684.146024] env[68638]: DEBUG nova.compute.manager [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing instance network info cache due to event network-changed-24a47fde-b177-4dfe-af1b-12b1396cf1a4. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 684.146259] env[68638]: DEBUG oslo_concurrency.lockutils [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] Acquiring lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.146396] env[68638]: DEBUG oslo_concurrency.lockutils [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] Acquired lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.146574] env[68638]: DEBUG nova.network.neutron [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing network info cache for port 24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.476998] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Successfully created port: 19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.537949] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.563781] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 684.575456] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully updated port: 6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 684.716735] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbfde94-8406-4582-8f49-03c09d61ab02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.721154] env[68638]: DEBUG nova.network.neutron [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 684.728719] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8401dec-79b3-457f-a356-6a8b66c0d207 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.760487] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e91ea1a-fcd5-4170-bb7c-5d2de162f4f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.769362] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cda5cf-086e-4fa9-bdbd-2459bce2acc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.784091] env[68638]: DEBUG nova.compute.provider_tree [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.894317] env[68638]: DEBUG nova.network.neutron [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Updating instance_info_cache with network_info: [{"id": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "address": "fa:16:3e:c6:4b:07", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap822f87a6-55", "ovs_interfaceid": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.930440] env[68638]: DEBUG nova.network.neutron [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.287163] env[68638]: DEBUG nova.scheduler.client.report [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 685.395408] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.395729] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Instance network_info: |[{"id": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "address": "fa:16:3e:c6:4b:07", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap822f87a6-55", "ovs_interfaceid": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.396233] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:4b:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.404865] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating folder: Project (98a35cb6ae4d4c8688fb89d7da0b2dd1). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.405203] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-693808b6-a094-43b9-97ab-91946b929dc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.415185] env[68638]: DEBUG nova.compute.manager [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Received event network-changed-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 685.415185] env[68638]: DEBUG nova.compute.manager [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Refreshing instance network info cache due to event network-changed-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 685.415185] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] Acquiring lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.415185] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] Acquired lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.415185] env[68638]: DEBUG nova.network.neutron [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Refreshing network info cache for port 822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.420364] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created folder: Project (98a35cb6ae4d4c8688fb89d7da0b2dd1) in parent group-v569734. [ 685.420460] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating folder: Instances. Parent ref: group-v569822. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.420951] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ba6b52d-9943-4fd1-9a8e-290e52590d33 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.433135] env[68638]: DEBUG oslo_concurrency.lockutils [req-898e1076-c32c-4623-8a11-4d3b30ba9961 req-1d6ce75e-bc12-4d19-8a11-9f5a951d0361 service nova] Releasing lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.433806] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created folder: Instances in parent group-v569822. 
[ 685.434057] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.434794] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.435064] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38f0e050-55ad-425a-8efa-63dfee575a19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.461239] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.461239] env[68638]: value = "task-2833261" [ 685.461239] env[68638]: _type = "Task" [ 685.461239] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.466795] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833261, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.583883] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 685.616084] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 685.616366] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.617856] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 685.617856] env[68638]: DEBUG nova.virt.hardware [None 
req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.617856] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 685.617856] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 685.618425] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 685.619105] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 685.619105] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 685.619105] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 685.619503] env[68638]: DEBUG nova.virt.hardware [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 685.620517] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cc269c-8016-4906-88e5-d2d16d290d4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.631797] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be38af4-6d6a-427e-9d52-cacceb483d0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.791690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: 
held 2.244s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.795474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.306s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.797921] env[68638]: INFO nova.compute.claims [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.823893] env[68638]: INFO nova.scheduler.client.report [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Deleted allocations for instance 32efc578-2cf9-4b61-bbaa-aa7031a04e33 [ 685.973409] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833261, 'name': CreateVM_Task, 'duration_secs': 0.337626} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.973741] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 685.974525] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.974797] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.975195] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 685.975552] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a5dd6c6-d10c-4025-9dbc-d7b601d9f869 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.983015] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 685.983015] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52141b7b-dfec-64d6-da39-00b76367899c" [ 685.983015] env[68638]: _type = "Task" [ 685.983015] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.994895] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52141b7b-dfec-64d6-da39-00b76367899c, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.995323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.995647] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 685.995972] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.996229] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.996516] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.996865] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0744224e-2d0b-4b7a-a6f3-a4e4758f43c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.005681] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.006018] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.007233] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7935c858-1c5b-4c5c-8665-874191b33989 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.013772] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 686.013772] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e1a74d-69da-09b7-43b8-e69c2cf4a9e9" [ 686.013772] env[68638]: _type = "Task" [ 686.013772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.022401] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e1a74d-69da-09b7-43b8-e69c2cf4a9e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.128554] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Successfully updated port: 19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.266786] env[68638]: DEBUG nova.compute.manager [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-plugged-6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 686.266786] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.266786] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.266786] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.266786] env[68638]: DEBUG nova.compute.manager [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] No waiting events found dispatching 
network-vif-plugged-6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 686.267197] env[68638]: WARNING nova.compute.manager [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received unexpected event network-vif-plugged-6cc59411-51bc-4b50-8095-b1d16aac6e44 for instance with vm_state building and task_state spawning. [ 686.268778] env[68638]: DEBUG nova.compute.manager [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-changed-6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 686.268778] env[68638]: DEBUG nova.compute.manager [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing instance network info cache due to event network-changed-6cc59411-51bc-4b50-8095-b1d16aac6e44. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 686.268778] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Acquiring lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.268778] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Acquired lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.268778] env[68638]: DEBUG nova.network.neutron [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing network info cache for port 6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 686.294373] env[68638]: DEBUG nova.network.neutron [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Updated VIF entry in instance network info cache for port 822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2. 
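The network-vif-plugged / network-changed entries above arrive from Neutron before the spawning thread has registered a waiter, which is why the compute manager logs "Received unexpected event ... for instance with vm_state building". A pure-Python sketch of that register-then-pop pattern; the class and names are illustrative, not Nova's actual implementation.

import threading
from collections import defaultdict

class InstanceEventRegistry:
    """Illustrative per-instance event table (not Nova's real class)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = defaultdict(dict)  # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        # The spawning thread registers interest before the VIF is plugged.
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        # The external-event handler pops the waiter; None corresponds to the
        # "No waiting events found dispatching ..." case in the log.
        with self._lock:
            return self._events[instance_uuid].pop(event_name, None)

registry = InstanceEventRegistry()
uuid = "aaf0185b-1a85-4e0e-afb1-55e9e2417d76"
event = "network-vif-plugged-6cc59411-51bc-4b50-8095-b1d16aac6e44"
waiter = registry.prepare(uuid, event)
pending = registry.pop(uuid, event)
if pending is not None:
    pending.set()  # wakes whoever blocks on waiter.wait(timeout=...)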
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.294889] env[68638]: DEBUG nova.network.neutron [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Updating instance_info_cache with network_info: [{"id": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "address": "fa:16:3e:c6:4b:07", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap822f87a6-55", "ovs_interfaceid": "822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.336419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fcc516a4-72a5-49d5-a64a-b302d793323d tempest-FloatingIPsAssociationNegativeTestJSON-660792595 tempest-FloatingIPsAssociationNegativeTestJSON-660792595-project-member] Lock "32efc578-2cf9-4b61-bbaa-aa7031a04e33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.967s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.528188] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e1a74d-69da-09b7-43b8-e69c2cf4a9e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009713} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.528188] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7cca0d6-254d-4413-8bf1-b89804149b40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.533056] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 686.533056] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cd1cbd-3599-dfc5-fe85-8eeb46de3997" [ 686.533056] env[68638]: _type = "Task" [ 686.533056] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.542586] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cd1cbd-3599-dfc5-fe85-8eeb46de3997, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.630810] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.631201] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquired lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.631793] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.800044] env[68638]: DEBUG oslo_concurrency.lockutils [req-7e1f60c4-2479-45d1-ae5d-94d9d9cf5d53 req-b3702d65-6384-405e-8193-5c3bd870e05f service nova] Releasing lock "refresh_cache-2fa9b930-c76c-4cac-a371-a6b9899dc71e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.833695] env[68638]: DEBUG nova.network.neutron [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.996058] env[68638]: DEBUG nova.network.neutron [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.048764] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cd1cbd-3599-dfc5-fe85-8eeb46de3997, 'name': SearchDatastore_Task, 'duration_secs': 0.014084} completed successfully. 
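The "Waiting for the task" / "progress is N%" / "completed successfully" triplets above come from oslo.vmware's task poller. A generic sketch of such a poll loop; fetch_task_info is a hypothetical callable returning (state, progress) that stands in for the vCenter round-trips, not the oslo.vmware API itself.

import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = fetch_task_info()
        if state == "success":        # log: "completed successfully"
            return
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")   # log: "progress is 0%", "... 77%"
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")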
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.052369] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.052968] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2fa9b930-c76c-4cac-a371-a6b9899dc71e/2fa9b930-c76c-4cac-a371-a6b9899dc71e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.053592] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5dd4cf3-f71d-4cf4-bfa2-0f458aa2bdb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.061976] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 687.061976] env[68638]: value = "task-2833262" [ 687.061976] env[68638]: _type = "Task" [ 687.061976] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.077061] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.125358] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Successfully updated port: 286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.190991] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.364198] env[68638]: DEBUG nova.network.neutron [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Updating instance_info_cache with network_info: [{"id": "19168c10-c119-4308-9487-6e17b5861113", "address": "fa:16:3e:c8:31:91", "network": {"id": "134e5fd0-8784-45a6-8e49-00269b42015f", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-815632853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ebea664959481685e3adc85304e174", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19168c10-c1", "ovs_interfaceid": "19168c10-c119-4308-9487-6e17b5861113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.487668] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acce11d-a5d2-499e-85b0-d2822a76b559 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.503023] env[68638]: DEBUG oslo_concurrency.lockutils [req-e9839d35-1cad-479b-8cda-39cc1acf1cab req-cab914b5-dbbd-47d8-9831-4068d642c59d service nova] Releasing lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.503023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04b8139-2542-45a2-8c70-12b1c67dbf0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.545140] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59216869-c55f-4620-86fa-532749c99c74 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.558165] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6810436e-9b62-4140-b05d-c4da953aff1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.576083] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833262, 'name': CopyVirtualDisk_Task} progress is 77%. 
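The instance_info_cache payloads above are lists of VIF dicts. A small sketch that flattens one such entry into the fields most useful when reading these logs (port id, MAC, devname, fixed IPs, MTU); summarize_vif is an illustrative helper, not part of Nova.

def summarize_vif(vif):
    """Flatten one network_info VIF entry, as logged above, for quick inspection."""
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "devname": vif["devname"],            # e.g. tap19168c10-c1
        "bridge": vif["network"]["bridge"],   # br-int
        "mtu": vif["network"]["meta"]["mtu"],
        "fixed_ips": fixed_ips,
    }

# Against the entry for port 19168c10-c119-4308-9487-6e17b5861113 this yields
# {'port_id': '19168c10-...', 'mac': 'fa:16:3e:c8:31:91', 'devname': 'tap19168c10-c1',
#  'bridge': 'br-int', 'mtu': 8950, 'fixed_ips': ['192.168.128.9']}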
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.581879] env[68638]: DEBUG nova.compute.provider_tree [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.631033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.631033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.631033] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 687.688317] env[68638]: DEBUG nova.compute.manager [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Received event network-vif-plugged-19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.688416] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Acquiring lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.690125] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.690125] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.690125] env[68638]: DEBUG nova.compute.manager [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] No waiting events found dispatching network-vif-plugged-19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.690125] env[68638]: WARNING 
nova.compute.manager [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Received unexpected event network-vif-plugged-19168c10-c119-4308-9487-6e17b5861113 for instance with vm_state building and task_state spawning. [ 687.690125] env[68638]: DEBUG nova.compute.manager [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Received event network-changed-19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.690699] env[68638]: DEBUG nova.compute.manager [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Refreshing instance network info cache due to event network-changed-19168c10-c119-4308-9487-6e17b5861113. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 687.690856] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Acquiring lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.869741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Releasing lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.870234] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Instance network_info: |[{"id": "19168c10-c119-4308-9487-6e17b5861113", "address": "fa:16:3e:c8:31:91", "network": {"id": "134e5fd0-8784-45a6-8e49-00269b42015f", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-815632853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ebea664959481685e3adc85304e174", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19168c10-c1", "ovs_interfaceid": "19168c10-c119-4308-9487-6e17b5861113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 687.870674] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Acquired lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.870931] env[68638]: DEBUG nova.network.neutron [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Refreshing network info cache for port 19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.872360] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:31:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19168c10-c119-4308-9487-6e17b5861113', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.885699] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Creating folder: Project (d4ebea664959481685e3adc85304e174). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.887585] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4e50435-6e56-4691-b1c1-5ded6cfbbae0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.902199] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Created folder: Project (d4ebea664959481685e3adc85304e174) in parent group-v569734. [ 687.902433] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Creating folder: Instances. Parent ref: group-v569825. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 687.902686] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abb63ddf-508b-4ec6-a061-1ec260856268 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.915567] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Created folder: Instances in parent group-v569825. [ 687.916033] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
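The "Instance VIF info" entry above shows how a Neutron VIF is reshaped for the vmwareapi driver: an OpaqueNetwork reference keyed by the NSX logical-switch id plus a vmxnet3 model. An illustrative mapping that mimics the logged shape; it is not Nova's actual helper.

def to_vmware_vif_info(vif):
    """Map a network_info VIF entry to the 'Instance VIF info' shape in the log."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],              # br-int
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }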
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.916033] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.916283] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a436410-7634-4a56-9900-2a9a0a4fd787 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.942345] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.942345] env[68638]: value = "task-2833265" [ 687.942345] env[68638]: _type = "Task" [ 687.942345] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.951929] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833265, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.076023] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552792} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.076023] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2fa9b930-c76c-4cac-a371-a6b9899dc71e/2fa9b930-c76c-4cac-a371-a6b9899dc71e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.076023] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.076023] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b297137f-7070-4a5d-9145-7e75b683a50d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.083241] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 688.083241] env[68638]: value = "task-2833266" [ 688.083241] env[68638]: _type = "Task" [ 688.083241] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.088286] env[68638]: DEBUG nova.scheduler.client.report [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.104750] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.220120] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.386318] env[68638]: DEBUG nova.compute.manager [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-plugged-286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 688.386439] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.386702] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.386896] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.387193] env[68638]: DEBUG nova.compute.manager [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] No waiting events found dispatching network-vif-plugged-286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.387421] env[68638]: WARNING 
nova.compute.manager [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received unexpected event network-vif-plugged-286b0758-18ef-4ab0-bf2c-05e465a216ad for instance with vm_state building and task_state spawning. [ 688.387618] env[68638]: DEBUG nova.compute.manager [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-changed-286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 688.387964] env[68638]: DEBUG nova.compute.manager [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing instance network info cache due to event network-changed-286b0758-18ef-4ab0-bf2c-05e465a216ad. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 688.388076] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Acquiring lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.454673] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833265, 'name': CreateVM_Task, 'duration_secs': 0.478155} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.454673] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.454991] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.458017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.458017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 688.458017] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9d03b6a-d5f7-4d62-902f-f9ad333c37c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.463088] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for 
the task: (returnval){ [ 688.463088] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521f842f-86af-d7de-b645-c9c804305e66" [ 688.463088] env[68638]: _type = "Task" [ 688.463088] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.471395] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521f842f-86af-d7de-b645-c9c804305e66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.593357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.593868] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 688.596370] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160549} completed successfully. 
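The inventory reported a little earlier for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff sets the capacity the scheduler can claim against, which placement generally derives as (total - reserved) * allocation_ratio. A one-off sketch of that arithmetic using the values from the log.

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
usable = {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
          for rc, v in inventory.items()}
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}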
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.596833] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.949s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.598204] env[68638]: INFO nova.compute.claims [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.608020] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 688.611169] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a1648e-a38b-4cc6-8c20-0e91b28f056a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.652083] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 2fa9b930-c76c-4cac-a371-a6b9899dc71e/2fa9b930-c76c-4cac-a371-a6b9899dc71e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.652921] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4db93f9d-741d-4cd6-b425-35e472c44fef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.682423] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 688.682423] env[68638]: value = "task-2833267" [ 688.682423] env[68638]: _type = "Task" [ 688.682423] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.694239] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.956155] env[68638]: DEBUG nova.network.neutron [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Updated VIF entry in instance network info cache for port 19168c10-c119-4308-9487-6e17b5861113. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 688.956597] env[68638]: DEBUG nova.network.neutron [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Updating instance_info_cache with network_info: [{"id": "19168c10-c119-4308-9487-6e17b5861113", "address": "fa:16:3e:c8:31:91", "network": {"id": "134e5fd0-8784-45a6-8e49-00269b42015f", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-815632853-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ebea664959481685e3adc85304e174", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19168c10-c1", "ovs_interfaceid": "19168c10-c119-4308-9487-6e17b5861113", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.976298] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521f842f-86af-d7de-b645-c9c804305e66, 'name': SearchDatastore_Task, 'duration_secs': 0.011987} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.978584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.978584] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.978584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.978584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.978733] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.978733] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be9e05e7-6730-455d-9204-a105ac78b780 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.989753] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.989861] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.992303] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246b2a28-a43b-4039-a805-668a3977e601 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.997748] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 688.997748] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52738b74-e8a2-fc2b-f1a8-898aace97945" [ 688.997748] env[68638]: _type = "Task" [ 688.997748] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.012449] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52738b74-e8a2-fc2b-f1a8-898aace97945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.099078] env[68638]: DEBUG nova.compute.utils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.100381] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.100555] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.198513] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833267, 'name': ReconfigVM_Task} progress is 14%. 
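Paths like "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk" above are datastore-qualified paths. A tiny helper sketch for building and splitting them; both functions are illustrative, not the ds_util API.

def join_ds_path(datastore, *parts):
    """Build a '[datastore] relative/path' string like those in the log."""
    return f"[{datastore}] " + "/".join(parts)

def split_ds_path(ds_path):
    """Split '[datastore2] dir/file.vmdk' into ('datastore2', 'dir/file.vmdk')."""
    bracketed, _, rel = ds_path.partition("] ")
    return bracketed.lstrip("["), rel

image = "ef1ae417-fdc1-452d-9e5d-ced4149ebfe9"
cached_vmdk = join_ds_path("datastore2", "devstack-image-cache_base", image, image + ".vmdk")
# -> '[datastore2] devstack-image-cache_base/ef1ae417-.../ef1ae417-....vmdk'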
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.219289] env[68638]: DEBUG nova.policy [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b140aa82f044f108521ab8c0d28c0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3e5757d1f74492481048df4a29032ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.325958] env[68638]: DEBUG nova.network.neutron [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updating instance_info_cache with network_info: [{"id": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "address": "fa:16:3e:45:23:67", "network": {"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a47fde-b1", "ovs_interfaceid": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "address": "fa:16:3e:fb:ab:46", "network": {"id": "e6f66e2d-f70a-4fd0-b80e-dea598811a10", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-156727678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc59411-51", "ovs_interfaceid": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "address": "fa:16:3e:2f:5a:34", "network": 
{"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap286b0758-18", "ovs_interfaceid": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.460146] env[68638]: DEBUG oslo_concurrency.lockutils [req-5456ef53-96f3-4d9d-b1f6-dbaccc99aac6 req-8e8c94f9-8979-48aa-9d9c-fe4ee33d91a2 service nova] Releasing lock "refresh_cache-7b0b6eec-4681-4926-ad3f-5572e022a467" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.511473] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52738b74-e8a2-fc2b-f1a8-898aace97945, 'name': SearchDatastore_Task, 'duration_secs': 0.01719} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.512462] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fb6860a-29f4-4858-b527-579c1c0e9c18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.518673] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 689.518673] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52290b4c-0516-591c-32f0-bda7885f7346" [ 689.518673] env[68638]: _type = "Task" [ 689.518673] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.527351] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52290b4c-0516-591c-32f0-bda7885f7346, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.610842] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 689.694742] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833267, 'name': ReconfigVM_Task, 'duration_secs': 0.656027} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.700146] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 2fa9b930-c76c-4cac-a371-a6b9899dc71e/2fa9b930-c76c-4cac-a371-a6b9899dc71e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.700146] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cce64b27-593b-4f1f-b2c0-48ad775f21b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.707539] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 689.707539] env[68638]: value = "task-2833268" [ 689.707539] env[68638]: _type = "Task" [ 689.707539] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.716506] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833268, 'name': Rename_Task} progress is 5%. 
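Read together, the tasks above trace the root-disk leg of a spawn: copy the cached image vmdk into the instance directory, extend it, reconfigure the VM to attach it as a sparse disk, then rename the VM. A hedged outline of that ordering with hypothetical callables, one per vCenter task recorded in the log.

def provision_root_disk(copy_virtual_disk, extend_virtual_disk,
                        attach_disk_to_vm, rename_vm,
                        cached_vmdk, instance_vmdk, root_size_kb, instance_name):
    """Each callable stands in for one vCenter task seen above."""
    copy_virtual_disk(cached_vmdk, instance_vmdk)          # CopyVirtualDisk_Task
    extend_virtual_disk(instance_vmdk, root_size_kb)       # ExtendVirtualDisk_Task (1048576 above)
    attach_disk_to_vm(instance_vmdk, disk_type="sparse")   # ReconfigVM_Task
    rename_vm(instance_name)                               # Rename_Task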
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.827908] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.828350] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance network_info: |[{"id": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "address": "fa:16:3e:45:23:67", "network": {"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a47fde-b1", "ovs_interfaceid": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "address": "fa:16:3e:fb:ab:46", "network": {"id": "e6f66e2d-f70a-4fd0-b80e-dea598811a10", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-156727678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc59411-51", "ovs_interfaceid": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "address": "fa:16:3e:2f:5a:34", "network": {"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap286b0758-18", "ovs_interfaceid": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.830976] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Acquired lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.831284] env[68638]: DEBUG nova.network.neutron [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Refreshing network info cache for port 286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.832619] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:23:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24a47fde-b177-4dfe-af1b-12b1396cf1a4', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:ab:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9f3a2eb5-353f-45c5-a73b-869626f4bb13', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6cc59411-51bc-4b50-8095-b1d16aac6e44', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:5a:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '286b0758-18ef-4ab0-bf2c-05e465a216ad', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.852099] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Creating folder: Project (a20bc501951647abbd0c0d8e075312e2). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.858378] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b9bad7d-4eda-40d4-bb9b-e8b5c30f9c9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.860773] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Successfully created port: 945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.872388] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Created folder: Project (a20bc501951647abbd0c0d8e075312e2) in parent group-v569734. [ 689.872589] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Creating folder: Instances. Parent ref: group-v569828. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.872836] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a166915-1df2-4331-bdab-b3c67784ec59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.882525] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Created folder: Instances in parent group-v569828. [ 689.882837] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.883053] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.883317] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9deea95b-e2c1-4930-9fd2-b2e78d20099e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.912305] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.912305] env[68638]: value = "task-2833271" [ 689.912305] env[68638]: _type = "Task" [ 689.912305] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.920635] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833271, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.029064] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52290b4c-0516-591c-32f0-bda7885f7346, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.029317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.032232] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7b0b6eec-4681-4926-ad3f-5572e022a467/7b0b6eec-4681-4926-ad3f-5572e022a467.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.032232] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf61c223-aa93-413f-b52b-3f56f98621d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.037217] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 690.037217] env[68638]: value = "task-2833272" [ 690.037217] env[68638]: _type = "Task" [ 690.037217] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.048033] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.223068] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833268, 'name': Rename_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.297504] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3420fe0-a658-4b44-b9c4-20d3c06d9ab1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.305314] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956de957-5eb1-4fb9-9b43-2add6a205840 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.342903] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0cbe27-e03c-4921-9bf9-aad3e0f4f398 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.350633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede1f344-7898-4c91-90ec-9f2f758c66de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.365690] env[68638]: DEBUG nova.compute.provider_tree [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.422094] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833271, 'name': CreateVM_Task, 'duration_secs': 0.462839} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.422365] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.423273] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.423458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.423841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 690.424041] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a29e08-c1fd-4a1f-8b4d-89310c098fdb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.431248] env[68638]: DEBUG 
oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 690.431248] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527815cb-859e-9b4e-a93b-f71255f8eefa" [ 690.431248] env[68638]: _type = "Task" [ 690.431248] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.440007] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527815cb-859e-9b4e-a93b-f71255f8eefa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.547377] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.631149] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 690.660977] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 690.664850] env[68638]: DEBUG nova.virt.hardware [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 690.664850] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee35163-0397-40f5-8729-d6590710d600 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.671459] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58350e7-7743-472b-9f51-8958c2490a42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.709281] env[68638]: DEBUG nova.network.neutron [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updated VIF entry in instance network info cache for port 286b0758-18ef-4ab0-bf2c-05e465a216ad. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.709780] env[68638]: DEBUG nova.network.neutron [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updating instance_info_cache with network_info: [{"id": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "address": "fa:16:3e:45:23:67", "network": {"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.135", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24a47fde-b1", "ovs_interfaceid": "24a47fde-b177-4dfe-af1b-12b1396cf1a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "address": "fa:16:3e:fb:ab:46", "network": {"id": "e6f66e2d-f70a-4fd0-b80e-dea598811a10", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-156727678", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9f3a2eb5-353f-45c5-a73b-869626f4bb13", "external-id": "nsx-vlan-transportzone-411", "segmentation_id": 411, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6cc59411-51", "ovs_interfaceid": "6cc59411-51bc-4b50-8095-b1d16aac6e44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "address": "fa:16:3e:2f:5a:34", "network": {"id": "8aa12228-52d8-4e89-b09b-28c49a3832ca", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1839165950", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.213", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", 
"segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap286b0758-18", "ovs_interfaceid": "286b0758-18ef-4ab0-bf2c-05e465a216ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.722982] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833268, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.868657] env[68638]: DEBUG nova.scheduler.client.report [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 690.945021] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527815cb-859e-9b4e-a93b-f71255f8eefa, 'name': SearchDatastore_Task, 'duration_secs': 0.010886} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.945021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.945021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.945021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.945021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.945021] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.945021] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0706555e-bbc1-41d5-9d7f-8306dc7401ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.953603] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.953806] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.954581] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b70b4a6-4ef9-4ed8-9b82-00f579373266 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.965549] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 690.965549] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5215c85e-7475-9bd8-d2df-46809864d939" [ 690.965549] env[68638]: _type = "Task" [ 690.965549] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.976346] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5215c85e-7475-9bd8-d2df-46809864d939, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.046290] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833272, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.218235] env[68638]: DEBUG oslo_concurrency.lockutils [req-913c9830-e53f-4afd-8d36-17651eb898ea req-e95056b8-360d-4d56-8ff4-2a7f8a0944e1 service nova] Releasing lock "refresh_cache-aaf0185b-1a85-4e0e-afb1-55e9e2417d76" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.225373] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833268, 'name': Rename_Task, 'duration_secs': 1.162999} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.225470] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 691.225701] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd29b8a0-4c1d-4ca0-a3ed-b34d7818b296 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.234874] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 691.234874] env[68638]: value = "task-2833273" [ 691.234874] env[68638]: _type = "Task" [ 691.234874] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.247266] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.376930] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.780s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.377499] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 691.380108] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.307s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.380362] env[68638]: DEBUG nova.objects.instance [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lazy-loading 'resources' on Instance uuid 168c2937-f8ce-472f-b21f-e48eed909f43 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 691.478568] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5215c85e-7475-9bd8-d2df-46809864d939, 'name': SearchDatastore_Task, 'duration_secs': 0.011091} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.479639] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-decad691-b299-4e45-8e86-369467a77c25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.486708] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 691.486708] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52662958-8b1b-02d8-30ec-3ea1e50a4c5f" [ 691.486708] env[68638]: _type = "Task" [ 691.486708] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.498069] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52662958-8b1b-02d8-30ec-3ea1e50a4c5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.549017] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833272, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.331878} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.549694] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7b0b6eec-4681-4926-ad3f-5572e022a467/7b0b6eec-4681-4926-ad3f-5572e022a467.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.549916] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.550456] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61a89d86-1cc6-40d3-8382-72f080d059af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.557098] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 691.557098] env[68638]: value = "task-2833274" [ 691.557098] env[68638]: _type = "Task" [ 691.557098] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.565639] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.745586] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833273, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.884456] env[68638]: DEBUG nova.compute.utils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 691.885399] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 691.996366] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52662958-8b1b-02d8-30ec-3ea1e50a4c5f, 'name': SearchDatastore_Task, 'duration_secs': 0.012345} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.999141] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.999228] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] aaf0185b-1a85-4e0e-afb1-55e9e2417d76/aaf0185b-1a85-4e0e-afb1-55e9e2417d76.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.999646] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-980b427c-6eee-4ec6-81f0-0626f6521a3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.006610] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 692.006610] env[68638]: value = "task-2833275" [ 692.006610] env[68638]: _type = "Task" [ 692.006610] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.016528] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833275, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.066873] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06995} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.067362] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.071328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31b3c4f-6439-4694-83fa-5e61a59cd9e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.105547] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 7b0b6eec-4681-4926-ad3f-5572e022a467/7b0b6eec-4681-4926-ad3f-5572e022a467.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.110594] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8b07aba-487b-4464-b3c8-a1b24ed94e7a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.133268] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 692.133268] env[68638]: value = "task-2833276" [ 692.133268] env[68638]: _type = "Task" [ 692.133268] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.143436] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833276, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.203592] env[68638]: DEBUG nova.compute.manager [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Received event network-vif-plugged-945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 692.203684] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] Acquiring lock "5a28d684-584b-4e13-9910-183119ce5d37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.204720] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] Lock "5a28d684-584b-4e13-9910-183119ce5d37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.204720] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] Lock "5a28d684-584b-4e13-9910-183119ce5d37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.204720] env[68638]: DEBUG nova.compute.manager [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] No waiting events found dispatching network-vif-plugged-945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.204720] env[68638]: WARNING nova.compute.manager [req-d9a4d342-8a5b-4cc7-88d9-136c81e47156 req-36c0ea61-bd57-4047-bbd6-3cffe8fd43b8 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Received unexpected event network-vif-plugged-945ff6d9-4999-47aa-b917-48298ca743df for instance with vm_state building and task_state spawning. [ 692.253832] env[68638]: DEBUG oslo_vmware.api [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833273, 'name': PowerOnVM_Task, 'duration_secs': 0.583331} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.258172] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 692.258431] env[68638]: INFO nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Took 9.48 seconds to spawn the instance on the hypervisor. 
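The repeated "Waiting for the task: (returnval){ ... } to complete" and "progress is N%" entries above are emitted by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal Python sketch of the calling pattern follows; the vCenter host, credentials, and managed-object ID are placeholders, and the session keyword arguments reflect the oslo.vmware API as commonly driven by the Nova vmwareapi driver, not values taken from this log.

    # Sketch only: drive a vSphere task and poll it to completion with oslo.vmware.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder connection details (not from this log).
    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # invoke_api() issues the SOAP call (here PowerOnVM_Task) and returns a task
    # reference; wait_for_task() then polls it, logging progress until it either
    # completes successfully or raises -- the loop behind the DEBUG lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)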
[ 692.258687] env[68638]: DEBUG nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.259696] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db79a0a2-93ae-43f8-b491-ef3f9a90e66c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.391720] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 692.464701] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Successfully updated port: 945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.525620] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.556969] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c959f88a-532c-4395-987a-8cb581fb2cce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.568599] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d268cc37-1781-4cdc-94eb-deb263b7ee38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.600067] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b015fa7-31f4-4eb8-a605-3ea005addb46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.606806] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a1e154-ed5f-4012-ad53-24431188d34f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.628602] env[68638]: DEBUG nova.compute.provider_tree [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.647949] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833276, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.784681] env[68638]: INFO nova.compute.manager [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Took 49.99 seconds to build instance. [ 692.966784] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.966943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.967074] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 693.025019] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.900519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.025019] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] aaf0185b-1a85-4e0e-afb1-55e9e2417d76/aaf0185b-1a85-4e0e-afb1-55e9e2417d76.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 693.025019] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 693.025019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c045165-f938-4af2-8705-3e40e4fe34a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.029707] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 693.029707] env[68638]: value = "task-2833277" [ 693.029707] env[68638]: _type = "Task" [ 693.029707] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.044618] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833277, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.132696] env[68638]: DEBUG nova.scheduler.client.report [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.152259] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833276, 'name': ReconfigVM_Task, 'duration_secs': 0.86292} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.153403] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 7b0b6eec-4681-4926-ad3f-5572e022a467/7b0b6eec-4681-4926-ad3f-5572e022a467.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.154059] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a85ff781-1311-4e54-9ddd-78138d77e2d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.164020] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 693.164020] env[68638]: value = "task-2833278" [ 693.164020] env[68638]: _type = "Task" [ 693.164020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.171840] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833278, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.287657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d97edc3-07c6-4602-8b46-e435dc6bd44e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.965s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.403474] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 693.440656] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 693.441376] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.441376] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 693.441376] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.441376] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 693.441566] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 693.441722] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 693.442066] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 693.442336] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 693.442598] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 693.442808] env[68638]: DEBUG nova.virt.hardware [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 693.445018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeaf06cc-3cbd-4ed8-837f-850685a687ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.455127] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffd1103-ff0e-4220-b734-87b1b5f191f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.474876] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.481504] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Creating folder: Project (187c9a2b97234da5bbf09ea32f457966). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.482336] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa4debe0-912a-4c35-a6b4-0f3ea2ef6624 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.493045] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Created folder: Project (187c9a2b97234da5bbf09ea32f457966) in parent group-v569734. [ 693.493045] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Creating folder: Instances. Parent ref: group-v569831. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.493045] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ccfea0d-345b-4cca-aa7b-1dae234a6999 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.506899] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.507125] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.507387] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Created folder: Instances in parent group-v569831. [ 693.507641] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.507786] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.508086] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6581f0e7-f9fa-4c87-94cd-0dc1559c4255 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.524526] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.524526] env[68638]: value = "task-2833281" [ 693.524526] env[68638]: _type = "Task" [ 693.524526] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.525301] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.538251] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833281, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.541193] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833277, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079851} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.541442] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 693.542247] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c222c51-8fed-45dd-a1f0-9674f2a96175 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.569103] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] aaf0185b-1a85-4e0e-afb1-55e9e2417d76/aaf0185b-1a85-4e0e-afb1-55e9e2417d76.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.572237] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04517e1d-b592-4666-a29a-10387ba88ec6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.592297] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 693.592297] env[68638]: value = "task-2833282" [ 693.592297] env[68638]: _type = "Task" [ 693.592297] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.601024] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833282, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.642351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.262s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.645733] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.503s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.649085] env[68638]: INFO nova.compute.claims [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 693.668380] env[68638]: INFO nova.scheduler.client.report [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleted allocations for instance 168c2937-f8ce-472f-b21f-e48eed909f43 [ 693.682227] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833278, 'name': Rename_Task, 'duration_secs': 0.170194} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.685165] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.685165] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64f5af66-599f-41cd-a641-b70fd014bf0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.691341] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 693.691341] env[68638]: value = "task-2833283" [ 693.691341] env[68638]: _type = "Task" [ 693.691341] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.698871] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833283, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.793727] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.801835] env[68638]: DEBUG nova.network.neutron [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Updating instance_info_cache with network_info: [{"id": "945ff6d9-4999-47aa-b917-48298ca743df", "address": "fa:16:3e:2f:d2:66", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945ff6d9-49", "ovs_interfaceid": "945ff6d9-4999-47aa-b917-48298ca743df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.037189] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833281, 'name': CreateVM_Task, 'duration_secs': 0.315885} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.037376] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.037819] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.037974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.038320] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.038587] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a1d8b00-d3f1-4e60-bd25-322685444e83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.043089] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 694.043089] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522bbfa4-561a-2f64-74ab-9689840705ed" [ 694.043089] env[68638]: _type = "Task" [ 694.043089] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.053088] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522bbfa4-561a-2f64-74ab-9689840705ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.101840] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833282, 'name': ReconfigVM_Task, 'duration_secs': 0.362076} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.102139] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Reconfigured VM instance instance-0000001e to attach disk [datastore2] aaf0185b-1a85-4e0e-afb1-55e9e2417d76/aaf0185b-1a85-4e0e-afb1-55e9e2417d76.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.102887] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-683e072c-2273-4c16-aea6-9f6d682fb788 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.110091] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 694.110091] env[68638]: value = "task-2833284" [ 694.110091] env[68638]: _type = "Task" [ 694.110091] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.118012] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833284, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.176806] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a1c896b5-fe8f-4f12-a47c-799bd960d0c4 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.664s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.177318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 32.925s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.177439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.177640] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.177794] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.183259] env[68638]: INFO nova.compute.manager [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Terminating instance [ 694.206892] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833283, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.307668] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.308088] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance network_info: |[{"id": "945ff6d9-4999-47aa-b917-48298ca743df", "address": "fa:16:3e:2f:d2:66", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945ff6d9-49", "ovs_interfaceid": "945ff6d9-4999-47aa-b917-48298ca743df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.310680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:d2:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '945ff6d9-4999-47aa-b917-48298ca743df', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.325770] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating folder: Project (d3e5757d1f74492481048df4a29032ca). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.325770] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46ae58b0-3a76-463e-a121-0ec6bfee73b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.335164] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created folder: Project (d3e5757d1f74492481048df4a29032ca) in parent group-v569734. [ 694.335377] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating folder: Instances. Parent ref: group-v569834. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.335624] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d47698f4-9ccf-4a13-bd06-8a4fac2a7557 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.341877] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.347373] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created folder: Instances in parent group-v569834. [ 694.347373] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.347373] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.347373] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5edc61b0-b491-40ed-b25f-5601d1ef7bd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.365225] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.365225] env[68638]: value = "task-2833287" [ 694.365225] env[68638]: _type = "Task" [ 694.365225] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.373219] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833287, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.559383] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522bbfa4-561a-2f64-74ab-9689840705ed, 'name': SearchDatastore_Task, 'duration_secs': 0.050081} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.560318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.560318] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.560318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.560459] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.560563] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.560833] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-703448d8-7abc-442c-a1d3-001509c07df9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.569669] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.570035] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 
tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.570689] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b22fd9-0dcb-4771-873a-67c447e5a05b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.576273] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 694.576273] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52319093-2a6b-d1c2-1d25-110ccb43608d" [ 694.576273] env[68638]: _type = "Task" [ 694.576273] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.585470] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52319093-2a6b-d1c2-1d25-110ccb43608d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.620502] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833284, 'name': Rename_Task, 'duration_secs': 0.144203} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.620818] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 694.621127] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e208c2c8-29ee-43ea-847d-23bed7b05e08 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.628851] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 694.628851] env[68638]: value = "task-2833288" [ 694.628851] env[68638]: _type = "Task" [ 694.628851] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.637993] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833288, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.684265] env[68638]: DEBUG nova.compute.manager [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Received event network-changed-945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 694.684265] env[68638]: DEBUG nova.compute.manager [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Refreshing instance network info cache due to event network-changed-945ff6d9-4999-47aa-b917-48298ca743df. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 694.684265] env[68638]: DEBUG oslo_concurrency.lockutils [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] Acquiring lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.684265] env[68638]: DEBUG oslo_concurrency.lockutils [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] Acquired lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.684265] env[68638]: DEBUG nova.network.neutron [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Refreshing network info cache for port 945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.689297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.689297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquired lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.689297] env[68638]: DEBUG nova.network.neutron [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.706129] env[68638]: DEBUG oslo_vmware.api [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833283, 'name': PowerOnVM_Task, 'duration_secs': 0.80308} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.706408] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.706616] env[68638]: INFO nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Took 9.12 seconds to spawn the instance on the hypervisor. [ 694.706814] env[68638]: DEBUG nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.711018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9745f788-97e1-43dc-b720-aadd42d16613 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.881333] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833287, 'name': CreateVM_Task, 'duration_secs': 0.346044} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.881333] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.882539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.882539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.882805] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.883122] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffde5a6-7129-4128-9e2f-0a1e7f587c7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.888924] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 694.888924] env[68638]: value = 
"session[5267461d-1849-2a3b-78fe-5543790e1404]52940d8a-f2c6-0739-634e-8f0dd294203c" [ 694.888924] env[68638]: _type = "Task" [ 694.888924] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.899776] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52940d8a-f2c6-0739-634e-8f0dd294203c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.087826] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52319093-2a6b-d1c2-1d25-110ccb43608d, 'name': SearchDatastore_Task, 'duration_secs': 0.011066} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.089249] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18de1caf-359b-4b3f-a4ec-1af946df661e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.094385] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 695.094385] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52591f2f-e008-5b40-d681-1880e27d71e6" [ 695.094385] env[68638]: _type = "Task" [ 695.094385] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.103167] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52591f2f-e008-5b40-d681-1880e27d71e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.138743] env[68638]: DEBUG oslo_vmware.api [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833288, 'name': PowerOnVM_Task, 'duration_secs': 0.508583} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.141427] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 695.141642] env[68638]: INFO nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Took 15.13 seconds to spawn the instance on the hypervisor. 
[ 695.141814] env[68638]: DEBUG nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 695.142870] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96523d1c-e55c-49a3-9143-432465baf286 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.191537] env[68638]: DEBUG nova.compute.utils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Can not refresh info_cache because instance was not found {{(pid=68638) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 695.230464] env[68638]: DEBUG nova.network.neutron [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.232136] env[68638]: INFO nova.compute.manager [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Took 47.49 seconds to build instance. [ 695.334010] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df751bb-9553-4082-8473-62d733440468 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.341336] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81a15e0-b372-4486-923b-abefac76d23e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.376090] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f3186e-bac1-4ad7-8c61-73a7bf92101a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.383956] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786c8e05-4f7a-48a8-8028-696fcfcecdbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.402136] env[68638]: DEBUG nova.compute.provider_tree [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.409302] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52940d8a-f2c6-0739-634e-8f0dd294203c, 'name': SearchDatastore_Task, 'duration_secs': 0.00988} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.409640] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.409882] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.410140] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.410288] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.410465] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.410724] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb1ef3d9-b99c-454e-a764-d5415052951d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.421388] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.421605] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.425264] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59811c15-1751-43d8-80a1-a10a829e022f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.428272] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 695.428272] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528ad00d-56aa-d89a-5cf6-fa8953fb407c" [ 695.428272] env[68638]: _type = "Task" [ 695.428272] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.436514] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528ad00d-56aa-d89a-5cf6-fa8953fb407c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.604607] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52591f2f-e008-5b40-d681-1880e27d71e6, 'name': SearchDatastore_Task, 'duration_secs': 0.039499} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.604869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.605145] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.605417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0df47439-6e74-4dda-857d-ed13f85211d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.612506] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 695.612506] env[68638]: value = "task-2833289" [ 695.612506] env[68638]: _type = "Task" [ 695.612506] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.620645] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.653610] env[68638]: DEBUG nova.network.neutron [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.668450] env[68638]: INFO nova.compute.manager [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Took 57.33 seconds to build instance. [ 695.741771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dbb7147a-e314-4255-a7d1-c7802f5e683e tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.922s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.794047] env[68638]: DEBUG nova.network.neutron [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Updated VIF entry in instance network info cache for port 945ff6d9-4999-47aa-b917-48298ca743df. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 695.794392] env[68638]: DEBUG nova.network.neutron [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Updating instance_info_cache with network_info: [{"id": "945ff6d9-4999-47aa-b917-48298ca743df", "address": "fa:16:3e:2f:d2:66", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap945ff6d9-49", "ovs_interfaceid": "945ff6d9-4999-47aa-b917-48298ca743df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.905533] env[68638]: DEBUG nova.scheduler.client.report [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.939436] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528ad00d-56aa-d89a-5cf6-fa8953fb407c, 'name': SearchDatastore_Task, 'duration_secs': 0.019092} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.940267] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0990ef48-cc6e-4bdc-82c0-73f173f1769a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.945527] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 695.945527] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52433322-1bb5-a849-bf12-3ff3cb672e0d" [ 695.945527] env[68638]: _type = "Task" [ 695.945527] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.955437] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52433322-1bb5-a849-bf12-3ff3cb672e0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.125062] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833289, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.164909] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Releasing lock "refresh_cache-168c2937-f8ce-472f-b21f-e48eed909f43" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.165331] env[68638]: DEBUG nova.compute.manager [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.165538] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.165851] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c85b3482-3646-44bb-a49e-5273d2516ddd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.175452] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145ccc4a-1a4b-4a20-b0ab-3cb1258ed787 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.186731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-af64821b-bb0e-406b-8b23-3d4f2ae3291f tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.348s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.215878] env[68638]: WARNING nova.virt.vmwareapi.vmops [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 168c2937-f8ce-472f-b21f-e48eed909f43 could not be found. 
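The lock records in this part of the trace ("Acquiring lock ... by ...", "Lock ... acquired ... waited N s", "released ... held N s") are emitted by oslo.concurrency's named-lock helpers, which Nova uses around the image cache, the instance network-info cache, and the resource tracker. A minimal sketch of that pattern follows; the lock name and function body are illustrative placeholders, not taken from this log.

```python
from oslo_concurrency import lockutils

# Context-manager form: the "Acquiring lock ..." / "released ... held N s"
# DEBUG lines in this log come from this code path in oslo.concurrency.
with lockutils.lock('refresh_cache-<instance-uuid>'):
    pass  # e.g. refresh the instance network info cache while holding the lock

# Equivalent decorator form, as used for coarse-grained locks such as
# "compute_resources" in the resource tracker (illustrative function name).
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # perform the claim while the named lock is held
```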
[ 696.216120] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.216316] env[68638]: INFO nova.compute.manager [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 0.05 seconds to destroy the instance on the hypervisor. [ 696.216625] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.216862] env[68638]: DEBUG nova.compute.manager [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 696.216977] env[68638]: DEBUG nova.network.neutron [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.242404] env[68638]: DEBUG nova.network.neutron [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.244475] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.297714] env[68638]: DEBUG oslo_concurrency.lockutils [req-2e2ff7dc-1fe5-4049-9225-15ba2704b64b req-4235c0b9-8add-407e-9b9c-8314da8c9229 service nova] Releasing lock "refresh_cache-5a28d684-584b-4e13-9910-183119ce5d37" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.377249] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "e3cf739a-3104-473d-af66-d9974ed1a222" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.377666] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.377988] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.378275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.379195] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.381907] env[68638]: INFO nova.compute.manager [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Terminating instance [ 696.410412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.410946] env[68638]: 
DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 696.414307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.414s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.415662] env[68638]: INFO nova.compute.claims [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.456890] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52433322-1bb5-a849-bf12-3ff3cb672e0d, 'name': SearchDatastore_Task, 'duration_secs': 0.011828} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.457252] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.457581] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5a28d684-584b-4e13-9910-183119ce5d37/5a28d684-584b-4e13-9910-183119ce5d37.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.457768] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de4aa815-5f9b-4f26-9aee-75d391919deb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.467732] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 696.467732] env[68638]: value = "task-2833290" [ 696.467732] env[68638]: _type = "Task" [ 696.467732] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.476834] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833290, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.627316] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833289, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699277} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.627661] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.627967] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.628281] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-beb7767e-4238-4034-86b6-b0d2b82567ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.634754] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 696.634754] env[68638]: value = "task-2833291" [ 696.634754] env[68638]: _type = "Task" [ 696.634754] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.643449] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833291, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.690336] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.744949] env[68638]: DEBUG nova.network.neutron [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.768372] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.888995] env[68638]: DEBUG nova.compute.manager [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.888995] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.888995] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b25765-df85-45fb-8a3f-16879ad146be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.898171] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 696.898439] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-397f81c1-7667-47ff-86bc-768f4fd14dcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.905111] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 696.905111] env[68638]: value = "task-2833292" [ 696.905111] env[68638]: _type = "Task" [ 696.905111] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.912835] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833292, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.920780] env[68638]: DEBUG nova.compute.utils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 696.925012] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 696.925225] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 696.964767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.964991] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.977668] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833290, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.991130] env[68638]: DEBUG nova.policy [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '743fc626120649a2a9976d8eca4d1a76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d35b14c6a7b4c5db309a56c8c22cd1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 697.147946] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139579} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.148057] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.148893] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bfdf6f-3811-449f-8d20-b6af9e6d986e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.175872] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.176326] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aac95c39-dcdb-419f-afaa-e5400275cf63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.196534] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "7b0b6eec-4681-4926-ad3f-5572e022a467" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.197080] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.197443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.197747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.198085] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 
tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.204056] env[68638]: INFO nova.compute.manager [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Terminating instance [ 697.217375] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 697.217375] env[68638]: value = "task-2833293" [ 697.217375] env[68638]: _type = "Task" [ 697.217375] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.236952] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.238047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.248297] env[68638]: INFO nova.compute.manager [-] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Took 1.03 seconds to deallocate network for instance. [ 697.405972] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Successfully created port: 9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.419641] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833292, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.425513] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 697.484597] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833290, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950827} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.484710] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5a28d684-584b-4e13-9910-183119ce5d37/5a28d684-584b-4e13-9910-183119ce5d37.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.484954] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.485486] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9e814135-aa5a-4614-829a-b0cba54f936e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.492501] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 697.492501] env[68638]: value = "task-2833294" [ 697.492501] env[68638]: _type = "Task" [ 697.492501] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.501399] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.712572] env[68638]: DEBUG nova.compute.manager [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 697.712572] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 697.714090] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d641dd-06e0-41f6-a509-0706c490249b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.729595] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 697.730567] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-535306f4-a344-4622-99d8-d6e770444eb4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.739043] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833293, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.751670] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 697.751670] env[68638]: value = "task-2833295" [ 697.751670] env[68638]: _type = "Task" [ 697.751670] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.759607] env[68638]: INFO nova.compute.manager [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance disappeared during terminate [ 697.759816] env[68638]: DEBUG oslo_concurrency.lockutils [None req-851bb306-e524-42ed-a8b0-3c266e4552fd tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "168c2937-f8ce-472f-b21f-e48eed909f43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.583s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.764952] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833295, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.784823] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.785484] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.785484] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.785660] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.785763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.792941] env[68638]: INFO nova.compute.manager [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Terminating instance [ 697.920910] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833292, 'name': PowerOffVM_Task, 'duration_secs': 0.613644} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.921320] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.921320] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.924187] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7046d85-dd6a-4e94-9054-104d0999b27e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.003304] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.239133} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.003958] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 698.004895] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da8f11e-09d8-49a6-97ed-b3afef6ecff8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.030335] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 5a28d684-584b-4e13-9910-183119ce5d37/5a28d684-584b-4e13-9910-183119ce5d37.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.033500] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d99195e2-7fea-4f17-9b65-7da599cf49d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.055047] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 698.055047] env[68638]: value = "task-2833297" [ 698.055047] env[68638]: _type = "Task" [ 698.055047] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.067063] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833297, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.088990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c3f561-0514-4cbf-b7c3-eb36a11ab02b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.096731] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56c9859-99ab-4edd-9efd-d2a4cd131f6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.132483] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18feb5fb-b05a-450a-afa9-7a663ce657e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.135233] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 698.135447] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 698.135726] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleting the datastore file [datastore2] e3cf739a-3104-473d-af66-d9974ed1a222 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 698.135982] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f985f9c-fd0d-4e79-8d0e-28b3621ffb9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.144357] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cced7e-ab3a-4195-b079-a400263344bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.148336] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for the task: (returnval){ [ 698.148336] env[68638]: value = "task-2833298" [ 698.148336] env[68638]: _type = "Task" [ 698.148336] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.159840] env[68638]: DEBUG nova.compute.provider_tree [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.168753] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.232065] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833293, 'name': ReconfigVM_Task, 'duration_secs': 0.55469} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.232409] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Reconfigured VM instance instance-00000022 to attach disk [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.233063] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9cb15f17-8267-492b-bb56-c1175a3d29d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.239319] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 698.239319] env[68638]: value = "task-2833299" [ 698.239319] env[68638]: _type = "Task" [ 698.239319] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.254255] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833299, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.263383] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.300807] env[68638]: DEBUG nova.compute.manager [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.301110] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.302621] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4faa0b44-7a50-4444-87b5-e9dc42c94d6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.311531] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.311849] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f084f8b5-625d-497a-93b4-95c57d6333e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.319328] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 698.319328] env[68638]: value = "task-2833300" [ 698.319328] env[68638]: _type = "Task" [ 698.319328] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.328551] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833300, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.438891] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 698.466517] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 698.466517] env[68638]: DEBUG 
nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 698.466855] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 698.466855] env[68638]: DEBUG nova.virt.hardware [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 698.467592] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c531a09-e1a3-4892-a622-9b345c0d84d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.475235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b765f0-6f4a-4646-ad47-d0e95b6e1445 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.564775] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833297, 'name': ReconfigVM_Task, 'duration_secs': 0.322137} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.565086] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 5a28d684-584b-4e13-9910-183119ce5d37/5a28d684-584b-4e13-9910-183119ce5d37.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.565738] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-017bc05e-788e-4644-b3c4-51c30aab6aed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.572093] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 698.572093] env[68638]: value = "task-2833301" [ 698.572093] env[68638]: _type = "Task" [ 698.572093] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.580586] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833301, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.658618] env[68638]: DEBUG oslo_vmware.api [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Task: {'id': task-2833298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29598} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.658939] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 698.659157] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 698.659336] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 698.659513] env[68638]: INFO nova.compute.manager [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Took 1.77 seconds to destroy the instance on the hypervisor. [ 698.659801] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.659936] env[68638]: DEBUG nova.compute.manager [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 698.660042] env[68638]: DEBUG nova.network.neutron [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.664029] env[68638]: DEBUG nova.scheduler.client.report [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.750759] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833299, 'name': Rename_Task, 'duration_secs': 0.20956} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.751086] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.751340] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a59a8f07-83b6-4f4d-a0ae-88283f96bf72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.758731] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 698.758731] env[68638]: value = "task-2833302" [ 698.758731] env[68638]: _type = "Task" [ 698.758731] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.765228] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.769847] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833302, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.829151] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833300, 'name': PowerOffVM_Task, 'duration_secs': 0.21098} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.829447] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.829615] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.829871] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c5e0e87-01c4-43ec-9b7c-6a367f1f131a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.985295] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 698.985295] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 698.985558] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleting the datastore file [datastore2] aaf0185b-1a85-4e0e-afb1-55e9e2417d76 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 698.985840] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c86ede05-e1e1-4862-b3ec-9440cf1920a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.992534] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 698.992534] env[68638]: value = "task-2833304" [ 698.992534] env[68638]: _type = "Task" [ 698.992534] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.002485] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833304, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.087360] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833301, 'name': Rename_Task, 'duration_secs': 0.150216} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.087686] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 699.087958] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7dc0e03-d206-469d-8a59-58f002d295d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.094581] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 699.094581] env[68638]: value = "task-2833305" [ 699.094581] env[68638]: _type = "Task" [ 699.094581] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.103683] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.169696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.170379] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 699.174127] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.619s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.174354] env[68638]: DEBUG nova.objects.instance [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lazy-loading 'resources' on Instance uuid a5dedd3e-a544-4005-bc9b-0735267d6753 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 699.235565] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Successfully updated port: 9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.267234] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833295, 'name': PowerOffVM_Task, 'duration_secs': 1.308892} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.268277] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 699.268496] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 699.268782] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0b339b9-7c0a-4eb1-bc90-1f0472a79672 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.274640] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833302, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.331506] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.332162] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.332162] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Deleting the datastore file [datastore2] 7b0b6eec-4681-4926-ad3f-5572e022a467 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.332162] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d299831-ba7a-4d91-836a-fe8771587bf8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.339943] env[68638]: DEBUG nova.compute.manager [req-097cbfb6-0c9b-4bcc-b9b8-793a37c9373c req-d51fca2b-5c2d-4239-84e8-8e5523954be2 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Received event network-vif-deleted-63f69876-6edd-4869-b1f4-40bf4dd16383 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 699.340151] env[68638]: INFO nova.compute.manager [req-097cbfb6-0c9b-4bcc-b9b8-793a37c9373c req-d51fca2b-5c2d-4239-84e8-8e5523954be2 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Neutron deleted interface 63f69876-6edd-4869-b1f4-40bf4dd16383; detaching it from the instance and deleting it from the info cache [ 699.340399] env[68638]: DEBUG nova.network.neutron [req-097cbfb6-0c9b-4bcc-b9b8-793a37c9373c req-d51fca2b-5c2d-4239-84e8-8e5523954be2 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.346482] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for the task: (returnval){ [ 699.346482] env[68638]: value = "task-2833307" [ 699.346482] env[68638]: _type = "Task" [ 699.346482] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.355110] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833307, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.498685] env[68638]: DEBUG nova.network.neutron [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.503153] env[68638]: DEBUG oslo_vmware.api [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359907} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.503783] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.504547] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.504547] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.505495] env[68638]: INFO nova.compute.manager [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Took 1.20 seconds to destroy the instance on the hypervisor. [ 699.505495] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.505495] env[68638]: DEBUG nova.compute.manager [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 699.505495] env[68638]: DEBUG nova.network.neutron [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.606035] env[68638]: DEBUG oslo_vmware.api [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833305, 'name': PowerOnVM_Task, 'duration_secs': 0.490023} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.606035] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.606035] env[68638]: INFO nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Took 8.97 seconds to spawn the instance on the hypervisor. [ 699.606035] env[68638]: DEBUG nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.606358] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebf40ac-fdf1-448b-8e3d-52d80424c335 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.683724] env[68638]: DEBUG nova.compute.utils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.689263] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.689609] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.740752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.740897] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquired lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.745103] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.773224] env[68638]: DEBUG oslo_vmware.api [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833302, 'name': PowerOnVM_Task, 'duration_secs': 0.854384} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.776501] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.776726] env[68638]: INFO nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Took 6.37 seconds to spawn the instance on the hypervisor. 
[ 699.776906] env[68638]: DEBUG nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.777998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256a7920-3d01-48fd-b7e6-f627ecd8a646 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.793151] env[68638]: DEBUG nova.policy [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5fce0bf2fb44b84afd238d875790fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc24eaf6cf74d539558c0a736e18c3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.847765] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5182106-3aac-4b86-a26b-459042caa37d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.868946] env[68638]: DEBUG oslo_vmware.api [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Task: {'id': task-2833307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325682} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.873267] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.873379] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.873551] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.873717] env[68638]: INFO nova.compute.manager [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Took 2.16 seconds to destroy the instance on the hypervisor. 
[ 699.873996] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.874276] env[68638]: DEBUG nova.compute.manager [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 699.874372] env[68638]: DEBUG nova.network.neutron [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.878963] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e760a0a1-9313-4f2a-a5c5-ae8284ff49a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.922248] env[68638]: DEBUG nova.compute.manager [req-097cbfb6-0c9b-4bcc-b9b8-793a37c9373c req-d51fca2b-5c2d-4239-84e8-8e5523954be2 service nova] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Detach interface failed, port_id=63f69876-6edd-4869-b1f4-40bf4dd16383, reason: Instance e3cf739a-3104-473d-af66-d9974ed1a222 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 700.004719] env[68638]: INFO nova.compute.manager [-] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Took 1.34 seconds to deallocate network for instance. [ 700.127580] env[68638]: INFO nova.compute.manager [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Took 48.66 seconds to build instance. [ 700.191181] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 700.296490] env[68638]: INFO nova.compute.manager [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Took 41.68 seconds to build instance. [ 700.314766] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.352678] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa93a3ed-438a-46f7-80ae-9680d3f412bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.360972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10989658-454e-4ff1-8bb6-4d24ffc7bfe1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.395025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1cb148-4a9e-4c2a-9aac-ad1fc1ad30bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.403763] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca91c633-bb0b-4bcc-8ffa-daf74b8e7513 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.417287] env[68638]: DEBUG nova.compute.provider_tree [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.455998] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Successfully created port: bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.511551] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.633020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-738fd944-1eb6-4cb5-9c15-e2c67e3a8bb9 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.841s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.664588] env[68638]: DEBUG nova.network.neutron [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Updating instance_info_cache with network_info: [{"id": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "address": "fa:16:3e:59:03:69", "network": {"id": "64a2601e-2897-41ce-a74b-835dac31210f", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1340811570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d35b14c6a7b4c5db309a56c8c22cd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4ba23f-3a", "ovs_interfaceid": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.722115] env[68638]: DEBUG nova.compute.manager [req-2867d69a-7119-49bd-aee1-b764d436b32f req-826357b0-3ece-4b78-8705-66d366cf99d5 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Received event network-vif-deleted-19168c10-c119-4308-9487-6e17b5861113 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 700.722115] env[68638]: INFO nova.compute.manager [req-2867d69a-7119-49bd-aee1-b764d436b32f req-826357b0-3ece-4b78-8705-66d366cf99d5 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Neutron deleted interface 19168c10-c119-4308-9487-6e17b5861113; detaching it from the instance and deleting it from the info cache [ 700.722115] env[68638]: DEBUG nova.network.neutron [req-2867d69a-7119-49bd-aee1-b764d436b32f req-826357b0-3ece-4b78-8705-66d366cf99d5 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.799734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b350a55b-2b89-41c0-9008-3720f50435a1 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.668s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.920843] env[68638]: DEBUG nova.scheduler.client.report [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.033909] env[68638]: DEBUG nova.network.neutron [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.139377] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 
tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.142346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "5a28d684-584b-4e13-9910-183119ce5d37" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.146073] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.146073] env[68638]: DEBUG nova.compute.manager [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.146073] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bc143a-3c61-46c1-ac4c-6b3e0c0a8bb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.153017] env[68638]: DEBUG nova.compute.manager [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 701.153017] env[68638]: DEBUG nova.objects.instance [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lazy-loading 'flavor' on Instance uuid 5a28d684-584b-4e13-9910-183119ce5d37 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.167249] env[68638]: DEBUG nova.network.neutron [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.170566] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Releasing lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.170911] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Instance network_info: |[{"id": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "address": "fa:16:3e:59:03:69", "network": {"id": "64a2601e-2897-41ce-a74b-835dac31210f", "bridge": "br-int", "label": 
"tempest-AttachInterfacesV270Test-1340811570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d35b14c6a7b4c5db309a56c8c22cd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4ba23f-3a", "ovs_interfaceid": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 701.171631] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:03:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a4ba23f-3a11-4f1f-b92e-8260b30fe959', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.181475] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Creating folder: Project (6d35b14c6a7b4c5db309a56c8c22cd1e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.182321] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6011e48-2a4a-476f-9100-1ddc45818366 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.197638] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Created folder: Project (6d35b14c6a7b4c5db309a56c8c22cd1e) in parent group-v569734. [ 701.197638] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Creating folder: Instances. Parent ref: group-v569837. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.197638] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f6b2613-8aa3-4fab-a759-e786ec75fae6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.201923] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 701.206162] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Created folder: Instances in parent group-v569837. [ 701.206396] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.207307] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.207307] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e88d031-69f8-446c-b541-0d23d247cdda {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.228851] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f06822dd-3f8a-4a87-aabe-642d1557ff50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.238433] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.238433] env[68638]: value = "task-2833310" [ 701.238433] env[68638]: _type = "Task" [ 701.238433] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.240668] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 701.240906] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.241078] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.241268] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.241415] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.241556] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 701.241760] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 701.241916] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 701.242090] 
env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 701.242311] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 701.242482] env[68638]: DEBUG nova.virt.hardware [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 701.245430] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0988efd-0d3a-4852-9e01-59c05de93062 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.256950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b67e4f-995e-4a69-aaa4-06b79b6a8b28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.271277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb847578-2d8e-46e7-8d18-15731afd83da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.274712] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833310, 'name': CreateVM_Task} progress is 15%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.286973] env[68638]: DEBUG nova.compute.manager [req-2867d69a-7119-49bd-aee1-b764d436b32f req-826357b0-3ece-4b78-8705-66d366cf99d5 service nova] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Detach interface failed, port_id=19168c10-c119-4308-9487-6e17b5861113, reason: Instance 7b0b6eec-4681-4926-ad3f-5572e022a467 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 701.303185] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Starting instance... 
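The hardware lines above reduce a 1-vCPU m1.nano flavor with no socket/core/thread preferences (limits 65536:65536:65536) to the single topology sockets=1, cores=1, threads=1. The following is a minimal stand-alone sketch of that enumeration step, simplified for illustration and not the actual nova/virt/hardware.py implementation:

    # Simplified illustration of "Build topologies for 1 vcpu(s) 1:1:1" ->
    # "Got 1 possible topologies": enumerate every (sockets, cores, threads)
    # factorisation of the vCPU count that fits inside the limits.
    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)], matching the single topology reported above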
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.372154] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Received event network-vif-plugged-9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 701.372154] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Acquiring lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.372154] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.376205] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.376205] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] No waiting events found dispatching network-vif-plugged-9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.376205] env[68638]: WARNING nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Received unexpected event network-vif-plugged-9a4ba23f-3a11-4f1f-b92e-8260b30fe959 for instance with vm_state building and task_state spawning. [ 701.376205] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Received event network-changed-9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 701.376205] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Refreshing instance network info cache due to event network-changed-9a4ba23f-3a11-4f1f-b92e-8260b30fe959. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 701.376205] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Acquiring lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.376205] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Acquired lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.376205] env[68638]: DEBUG nova.network.neutron [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Refreshing network info cache for port 9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.428831] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.255s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.431740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.842s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.437508] env[68638]: INFO nova.compute.claims [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.457898] env[68638]: INFO nova.scheduler.client.report [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Deleted allocations for instance a5dedd3e-a544-4005-bc9b-0735267d6753 [ 701.536678] env[68638]: INFO nova.compute.manager [-] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Took 1.66 seconds to deallocate network for instance. [ 701.665595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.672907] env[68638]: INFO nova.compute.manager [-] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Took 2.17 seconds to deallocate network for instance. [ 701.766780] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833310, 'name': CreateVM_Task} progress is 99%. 
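The Acquiring/acquired/released lines above come from oslo.concurrency named locks: a per-instance "<uuid>-events" lock around pop_instance_event and a "refresh_cache-<uuid>" lock around the network info cache refresh. A hedged sketch of the two usage forms behind those messages, with lock names taken from the log (the surrounding Nova code is not reproduced):

    from oslo_concurrency import lockutils

    instance_uuid = "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4"   # instance from the log above

    # Context-manager form, as used around the network info cache refresh.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # refresh the instance network info cache while holding the lock

    # Decorator form, serializing per-instance external event handling.
    @lockutils.synchronized("%s-events" % instance_uuid)
    def pop_event():
        pass  # pop a waiting network-vif-plugged event while holding the lock

    pop_event()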
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.827938] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.965576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3783bd3-ca91-416c-a788-b8cba62dae3f tempest-ImagesOneServerTestJSON-1072539284 tempest-ImagesOneServerTestJSON-1072539284-project-member] Lock "a5dedd3e-a544-4005-bc9b-0735267d6753" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.409s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.048549] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.162459] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.162459] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-decfea66-ec73-4689-8dfb-3b0c63dece83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.170214] env[68638]: DEBUG oslo_vmware.api [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 702.170214] env[68638]: value = "task-2833311" [ 702.170214] env[68638]: _type = "Task" [ 702.170214] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.182453] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.184560] env[68638]: DEBUG oslo_vmware.api [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833311, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.220875] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Successfully updated port: bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 702.237374] env[68638]: INFO nova.compute.manager [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Rebuilding instance [ 702.250498] env[68638]: DEBUG nova.network.neutron [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Updated VIF entry in instance network info cache for port 9a4ba23f-3a11-4f1f-b92e-8260b30fe959. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 702.250813] env[68638]: DEBUG nova.network.neutron [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Updating instance_info_cache with network_info: [{"id": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "address": "fa:16:3e:59:03:69", "network": {"id": "64a2601e-2897-41ce-a74b-835dac31210f", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1340811570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d35b14c6a7b4c5db309a56c8c22cd1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4ba23f-3a", "ovs_interfaceid": "9a4ba23f-3a11-4f1f-b92e-8260b30fe959", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.267126] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833310, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.296870] env[68638]: DEBUG nova.compute.manager [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.298030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41bdc1b-6c8e-4282-a6b3-f5d02f236885 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.682419] env[68638]: DEBUG oslo_vmware.api [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833311, 'name': PowerOffVM_Task, 'duration_secs': 0.419005} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.682419] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.682610] env[68638]: DEBUG nova.compute.manager [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.686601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5039737c-f320-4df4-94a4-7629f4ad73f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.726244] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.726390] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.726540] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 702.757457] env[68638]: DEBUG oslo_concurrency.lockutils [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] Releasing lock "refresh_cache-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.757700] env[68638]: DEBUG nova.compute.manager 
[req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-deleted-24a47fde-b177-4dfe-af1b-12b1396cf1a4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 702.757881] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-deleted-286b0758-18ef-4ab0-bf2c-05e465a216ad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 702.758065] env[68638]: DEBUG nova.compute.manager [req-05dee1f2-c633-4572-9e1d-6cebdae28b73 req-8d4f8e84-667c-4983-9626-6b8cb9041bb2 service nova] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Received event network-vif-deleted-6cc59411-51bc-4b50-8095-b1d16aac6e44 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 702.767512] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833310, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.062375] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f0c823-d9cd-4761-8573-939a708d8c43 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.071659] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b125c335-cb17-4177-95da-f115fbf4ee75 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.105507] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6543348d-f2aa-4000-a834-29fe43b4a7f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.113975] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82412427-0888-412d-a543-cf75b8bfa9a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.127628] env[68638]: DEBUG nova.compute.provider_tree [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.202700] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7e44e0c-8797-4b02-8b12-cccc0b9d102f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.269595] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833310, 'name': CreateVM_Task, 'duration_secs': 1.694518} completed successfully. 
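task-2833310 above runs from CreateVM_Task submission through "progress is 15%" and repeated "progress is 99%" polls to "completed successfully" with a duration_secs of 1.694518. The real loop is oslo_vmware.api's wait_for_task driven by a looping call, as the file paths in the log show; the block below is only a self-contained sketch of that poll-until-terminal pattern with a hypothetical poll_fn:

    import time

    def wait_for_task(poll_fn, interval=0.5):
        """Poll poll_fn until the task reaches a terminal state.

        poll_fn is a hypothetical callable returning e.g.
        {'state': 'running', 'progress': 15} or {'state': 'success'}.
        """
        start = time.monotonic()
        while True:
            info = poll_fn()
            if info['state'] == 'success':
                return round(time.monotonic() - start, 6)   # the duration_secs seen in the log
            if info['state'] == 'error':
                raise RuntimeError("task failed")
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(interval)

    # Example: a fake task that succeeds on the third poll.
    states = iter([{'state': 'running', 'progress': 15},
                   {'state': 'running', 'progress': 99},
                   {'state': 'success'}])
    print("duration_secs:", wait_for_task(lambda: next(states), interval=0.01))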
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.269786] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 703.270535] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.270741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.271122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 703.272088] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.274169] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15a4c2b2-f258-4d22-a4b5-70d1d9355d9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.281137] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 703.281137] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524e7557-34f8-b26f-7e4d-addab9af2243" [ 703.281137] env[68638]: _type = "Task" [ 703.281137] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.290231] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524e7557-34f8-b26f-7e4d-addab9af2243, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.311482] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.311860] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7702137c-694c-4e09-bfa8-18b323bb6b4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.322690] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 703.322690] env[68638]: value = "task-2833312" [ 703.322690] env[68638]: _type = "Task" [ 703.322690] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.337871] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.502878] env[68638]: DEBUG nova.network.neutron [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updating instance_info_cache with network_info: [{"id": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "address": "fa:16:3e:a6:47:0d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc37d458-42", "ovs_interfaceid": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.598551] env[68638]: DEBUG nova.compute.manager [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Received event network-vif-plugged-bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 703.598775] env[68638]: DEBUG oslo_concurrency.lockutils 
[req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.599256] env[68638]: DEBUG oslo_concurrency.lockutils [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.599434] env[68638]: DEBUG oslo_concurrency.lockutils [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.599610] env[68638]: DEBUG nova.compute.manager [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] No waiting events found dispatching network-vif-plugged-bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.599760] env[68638]: WARNING nova.compute.manager [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Received unexpected event network-vif-plugged-bc37d458-421e-4ca1-a705-30c976b1fdbd for instance with vm_state building and task_state spawning. [ 703.599915] env[68638]: DEBUG nova.compute.manager [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Received event network-changed-bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 703.600076] env[68638]: DEBUG nova.compute.manager [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Refreshing instance network info cache due to event network-changed-bc37d458-421e-4ca1-a705-30c976b1fdbd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 703.600310] env[68638]: DEBUG oslo_concurrency.lockutils [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Acquiring lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.634048] env[68638]: DEBUG nova.scheduler.client.report [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 703.794668] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524e7557-34f8-b26f-7e4d-addab9af2243, 'name': SearchDatastore_Task, 'duration_secs': 0.012405} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.794668] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.794668] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.794668] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.794668] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.794668] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.794991] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe348a66-f56d-440f-8421-08e13eadc9da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.803463] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.803463] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.804094] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61f3c83e-5f4f-4ed4-ab7f-753b762ad2ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.810295] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 703.810295] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52691d6c-4151-5fbf-9363-e1bdc5268ccf" [ 703.810295] env[68638]: _type = "Task" [ 703.810295] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.818263] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52691d6c-4151-5fbf-9363-e1bdc5268ccf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.832264] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833312, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.006628] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.006987] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Instance network_info: |[{"id": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "address": "fa:16:3e:a6:47:0d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc37d458-42", "ovs_interfaceid": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.007872] env[68638]: DEBUG oslo_concurrency.lockutils [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Acquired lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.008089] env[68638]: DEBUG nova.network.neutron [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Refreshing network info cache for port bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.009976] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:47:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc37d458-421e-4ca1-a705-30c976b1fdbd', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.018529] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 
tempest-AttachVolumeNegativeTest-754123378-project-member] Creating folder: Project (ccc24eaf6cf74d539558c0a736e18c3e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.019057] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2013b8d3-790c-43e9-9ae7-44ae428610b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.029541] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created folder: Project (ccc24eaf6cf74d539558c0a736e18c3e) in parent group-v569734. [ 704.029753] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Creating folder: Instances. Parent ref: group-v569840. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.030234] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-262681be-2265-46a0-aac4-3c2829036818 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.039679] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created folder: Instances in parent group-v569840. [ 704.039812] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.040077] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.040517] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1369b092-e6c4-4187-b24f-0b827c4e07af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.060130] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.060130] env[68638]: value = "task-2833315" [ 704.060130] env[68638]: _type = "Task" [ 704.060130] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.069583] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833315, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.136842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.137400] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 704.141164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.118s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.142785] env[68638]: INFO nova.compute.claims [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.321459] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52691d6c-4151-5fbf-9363-e1bdc5268ccf, 'name': SearchDatastore_Task, 'duration_secs': 0.008468} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.322342] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7343e6e6-1e20-4ff4-84a8-4178cbfa3db2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.331069] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 704.331069] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52dda9bc-9369-312f-a95f-d48b839b8c14" [ 704.331069] env[68638]: _type = "Task" [ 704.331069] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.335587] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833312, 'name': PowerOffVM_Task, 'duration_secs': 0.569785} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.342524] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.345153] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 704.349021] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d79f5b-9b38-4017-b36b-7e03026517c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.356670] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52dda9bc-9369-312f-a95f-d48b839b8c14, 'name': SearchDatastore_Task, 'duration_secs': 0.009519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.359295] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.359757] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ebd7dd7a-2565-45da-bf7a-b8047c54ebe4/ebd7dd7a-2565-45da-bf7a-b8047c54ebe4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.360082] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.360327] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-075f237c-5a10-4264-97cc-45a74129a1be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.362479] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c017431b-1bda-4aed-b7cc-1265792a178f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.371573] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 
tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 704.371573] env[68638]: value = "task-2833316" [ 704.371573] env[68638]: _type = "Task" [ 704.371573] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.383124] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833316, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.388993] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.389235] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.389414] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Deleting the datastore file [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.389681] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ade2199e-c67e-4290-b1bc-e59eb0f4b547 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.397464] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 704.397464] env[68638]: value = "task-2833318" [ 704.397464] env[68638]: _type = "Task" [ 704.397464] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.408045] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833318, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.571571] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833315, 'name': CreateVM_Task, 'duration_secs': 0.351403} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.574538] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 704.575413] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.575573] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.576861] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 704.578510] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25bb25bb-09c2-4d3d-acd0-7f9d501da4db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.585813] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 704.585813] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c5e468-2bf1-c0d8-9b75-273ff3175fac" [ 704.585813] env[68638]: _type = "Task" [ 704.585813] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.596317] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c5e468-2bf1-c0d8-9b75-273ff3175fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.656087] env[68638]: DEBUG nova.compute.utils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 704.661753] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Not allocating networking since 'none' was specified. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 704.887721] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833316, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509052} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.888088] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ebd7dd7a-2565-45da-bf7a-b8047c54ebe4/ebd7dd7a-2565-45da-bf7a-b8047c54ebe4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.888400] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.888709] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6e2071e-6097-404f-a8c5-f6bbe99f62fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.895374] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 704.895374] env[68638]: value = "task-2833319" [ 704.895374] env[68638]: _type = "Task" [ 704.895374] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.913887] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.918861] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094065} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.918861] env[68638]: DEBUG nova.network.neutron [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updated VIF entry in instance network info cache for port bc37d458-421e-4ca1-a705-30c976b1fdbd. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 704.919204] env[68638]: DEBUG nova.network.neutron [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updating instance_info_cache with network_info: [{"id": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "address": "fa:16:3e:a6:47:0d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc37d458-42", "ovs_interfaceid": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.923549] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.923549] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.923549] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 705.098091] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c5e468-2bf1-c0d8-9b75-273ff3175fac, 'name': SearchDatastore_Task, 'duration_secs': 0.061004} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.098787] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.099114] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.099379] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.099531] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.099839] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.100323] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09abfd27-6822-4bec-aeed-bb10a3cf66a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.108593] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.108770] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.109576] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5490c971-f2fd-47a2-81f6-fe197cf49e9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.116027] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 705.116027] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528bf794-d230-4a61-c834-df51aa79750a" [ 705.116027] env[68638]: _type = "Task" [ 705.116027] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.123586] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528bf794-d230-4a61-c834-df51aa79750a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.166732] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.178741] env[68638]: DEBUG nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.179642] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9272487-f4d8-4df0-ab1b-2ebeff385d57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.405246] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06044} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.407810] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.408837] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975207e8-9795-4771-9a38-cb86a79da9cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.434278] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] ebd7dd7a-2565-45da-bf7a-b8047c54ebe4/ebd7dd7a-2565-45da-bf7a-b8047c54ebe4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.437767] env[68638]: DEBUG oslo_concurrency.lockutils [req-fa00cd4c-0de1-4317-a3f6-755e6f9a51a9 req-b7259a57-6850-4af7-aac9-bbca4c4269ec service nova] Releasing lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.442549] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b470d7a1-e627-42cc-83d4-e335e9f7e968 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.464946] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 705.464946] env[68638]: value = "task-2833320" [ 705.464946] env[68638]: _type = "Task" [ 705.464946] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.480927] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833320, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.626215] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528bf794-d230-4a61-c834-df51aa79750a, 'name': SearchDatastore_Task, 'duration_secs': 0.007289} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.627071] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c68a77f6-9eee-4ed4-a168-994953a078d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.636451] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 705.636451] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52183203-68c3-6c0c-5950-8ee520759663" [ 705.636451] env[68638]: _type = "Task" [ 705.636451] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.646675] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52183203-68c3-6c0c-5950-8ee520759663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.694888] env[68638]: INFO nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] instance snapshotting [ 705.695368] env[68638]: WARNING nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 705.701537] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b1c195-71c6-4b5f-a0f2-10601a59a9b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.728564] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66247338-1cf0-4313-a7f9-8df8292fd6ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.839015] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b193aa3-6055-40f9-8493-6b3bbe816076 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.845658] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b45128-8adc-4580-9bfe-c67ff57f8d48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.878579] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84258bd-9f30-433a-bb8c-fe7c2e2bde71 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.886142] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17071640-8f58-4f7b-9e41-e77dcf49b74f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.900667] 
env[68638]: DEBUG nova.compute.provider_tree [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.981254] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833320, 'name': ReconfigVM_Task, 'duration_secs': 0.26716} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.981254] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Reconfigured VM instance instance-00000023 to attach disk [datastore1] ebd7dd7a-2565-45da-bf7a-b8047c54ebe4/ebd7dd7a-2565-45da-bf7a-b8047c54ebe4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.983817] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e43b6b58-8740-43cf-b675-c466417f4405 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.991966] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 705.991966] env[68638]: value = "task-2833321" [ 705.991966] env[68638]: _type = "Task" [ 705.991966] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.001059] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.001059] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.001059] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.001059] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.001059] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.001460] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.001581] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.001978] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.002076] env[68638]: DEBUG 
nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.002219] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.003468] env[68638]: DEBUG nova.virt.hardware [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.003696] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3d1cac-7af0-4457-a5ef-7f07a79d9c95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.010624] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833321, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.016540] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca509e-4e60-4f28-bea6-02a24bd52914 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.038460] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.044292] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.044630] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.044881] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26068643-1e19-4483-be5e-445229e7a4c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.063888] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.063888] env[68638]: value = "task-2833322" [ 706.063888] env[68638]: _type = "Task" [ 706.063888] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.078163] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833322, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.147856] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52183203-68c3-6c0c-5950-8ee520759663, 'name': SearchDatastore_Task, 'duration_secs': 0.011477} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.147856] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.147856] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1b176c5d-e77c-410b-b282-b7bba65359a9/1b176c5d-e77c-410b-b282-b7bba65359a9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.148203] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1387d4b-6e27-4566-ba65-9de9859e5b42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.154383] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 706.154383] env[68638]: value = "task-2833323" [ 706.154383] env[68638]: _type = "Task" [ 706.154383] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.163850] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.180789] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 706.208087] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.208357] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.208520] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.208700] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.208845] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.209183] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.209285] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.209360] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.209519] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d 
tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.209672] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.209840] env[68638]: DEBUG nova.virt.hardware [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.213017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1608e7-95e6-4864-bdb2-b103703adc11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.219093] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1785520-5f6c-403d-be1f-7cc41c085b5d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.235020] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.238299] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Creating folder: Project (6560695925e44dba9136a8d16a0c847d). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.239431] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 706.239656] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3bc4e983-4a44-4f86-a65f-21bccaf6df65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.241410] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-06ea2898-18bc-4619-a9ea-bb92ef10e993 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.248392] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 706.248392] env[68638]: value = "task-2833325" [ 706.248392] env[68638]: _type = "Task" [ 706.248392] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.253264] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Created folder: Project (6560695925e44dba9136a8d16a0c847d) in parent group-v569734. [ 706.253452] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Creating folder: Instances. Parent ref: group-v569844. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.254063] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc25f61b-e162-4a84-ae7e-7e63146c9842 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.258604] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833325, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.265784] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Created folder: Instances in parent group-v569844. [ 706.265932] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.266675] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.266675] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e33f522b-688d-404c-82b1-a3cf2d65cc0d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.282939] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.282939] env[68638]: value = "task-2833327" [ 706.282939] env[68638]: _type = "Task" [ 706.282939] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.291220] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833327, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.407682] env[68638]: DEBUG nova.scheduler.client.report [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 706.503934] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833321, 'name': Rename_Task, 'duration_secs': 0.163855} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.505029] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.505029] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db796794-612f-4d9c-8b03-ef697dfc0f46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.511944] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 706.511944] env[68638]: value = "task-2833328" [ 706.511944] env[68638]: _type = "Task" [ 706.511944] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.522890] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.577237] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833322, 'name': CreateVM_Task, 'duration_secs': 0.433043} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.577485] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.578015] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.578301] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.578782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.579134] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8afe246a-77fb-4a40-bb58-df8b4d61ed6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.584899] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 706.584899] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d13b37-e878-9a50-7193-2542b34f3515" [ 706.584899] env[68638]: _type = "Task" [ 706.584899] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.597164] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d13b37-e878-9a50-7193-2542b34f3515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.669912] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833323, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.760160] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833325, 'name': CreateSnapshot_Task, 'duration_secs': 0.498303} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.760505] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 706.764030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccdb4e0-2002-46ce-9000-7c9cc545bf02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.793608] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833327, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.915927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.775s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.916587] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 706.920119] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.944s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.920371] env[68638]: DEBUG nova.objects.instance [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lazy-loading 'resources' on Instance uuid 4eb4360a-46a8-440b-b300-4724c3497ff2 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 707.028718] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833328, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.096936] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d13b37-e878-9a50-7193-2542b34f3515, 'name': SearchDatastore_Task, 'duration_secs': 0.058829} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.097278] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.097523] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.097761] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.097907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.098109] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.098391] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-613bd863-c08f-4b7e-a805-30ae3483e47f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.109923] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.109923] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.110251] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3316e397-6a36-4955-bbd5-727c56e482f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.116143] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 707.116143] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522d5029-da56-5a14-c37c-ffb9f052118c" [ 707.116143] env[68638]: _type = "Task" [ 707.116143] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.124767] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522d5029-da56-5a14-c37c-ffb9f052118c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.166546] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554843} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.166812] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1b176c5d-e77c-410b-b282-b7bba65359a9/1b176c5d-e77c-410b-b282-b7bba65359a9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.167059] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.167362] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30e4fe1f-cce1-4919-8b3a-4067eea7614c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.176129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "809416da-af6c-429d-b4b2-5334768aa744" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.176438] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 
tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.176741] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 707.176741] env[68638]: value = "task-2833329" [ 707.176741] env[68638]: _type = "Task" [ 707.176741] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.191898] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.283813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 707.284357] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-acce6546-c6e5-440f-af2f-99f5be77da62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.296890] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833327, 'name': CreateVM_Task, 'duration_secs': 0.58657} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.299025] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.299025] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 707.299025] env[68638]: value = "task-2833330" [ 707.299025] env[68638]: _type = "Task" [ 707.299025] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.299025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.299291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.299682] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.300093] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23db4713-da82-464b-8ab0-4659d9af88f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.310177] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 707.310177] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b52f7b-7559-83a0-5783-15ad2eb6efa2" [ 707.310177] env[68638]: _type = "Task" [ 707.310177] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.312666] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.320912] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b52f7b-7559-83a0-5783-15ad2eb6efa2, 'name': SearchDatastore_Task, 'duration_secs': 0.008889} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.321167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.321398] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.321625] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.421603] env[68638]: DEBUG nova.compute.utils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 707.423206] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 707.423388] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.470102] env[68638]: DEBUG nova.policy [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '075b1dab9233409390d346c7bbfa3d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efa342b9d9a34e9e8e708c8f356f905e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 707.525262] env[68638]: DEBUG oslo_vmware.api [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833328, 'name': PowerOnVM_Task, 'duration_secs': 0.556806} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.527036] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.527036] env[68638]: INFO nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Took 9.09 seconds to spawn the instance on the hypervisor. [ 707.527172] env[68638]: DEBUG nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.528081] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3b8232-b55c-4e5d-8c53-a1697a78130a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.628449] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522d5029-da56-5a14-c37c-ffb9f052118c, 'name': SearchDatastore_Task, 'duration_secs': 0.009613} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.632442] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c796540-c89c-4734-98f4-3cb9f225d34d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.643954] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 707.643954] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a279e-d0d0-b5e0-381d-b60a22a0cb70" [ 707.643954] env[68638]: _type = "Task" [ 707.643954] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.654501] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528a279e-d0d0-b5e0-381d-b60a22a0cb70, 'name': SearchDatastore_Task, 'duration_secs': 0.010022} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.654846] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.655164] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.656010] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.656010] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.656010] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ac33b1c-8e5e-4f2f-8b12-77e7a0f41a64 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.661260] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-344236b9-9879-4287-83d4-dc12d810afe9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.669084] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 707.669084] env[68638]: value = "task-2833331" [ 707.669084] env[68638]: _type = "Task" [ 707.669084] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.674299] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.674299] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.675503] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-435e4b31-db9c-4046-9729-da63cef4fe8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.682881] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.692591] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 707.692591] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5224b98b-a9e7-59e0-6fde-99700ce74477" [ 707.692591] env[68638]: _type = "Task" [ 707.692591] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.700720] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081824} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.701424] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 707.702328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67f1cb0-7d65-412e-aad3-487257c71ace {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.712390] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5224b98b-a9e7-59e0-6fde-99700ce74477, 'name': SearchDatastore_Task, 'duration_secs': 0.008462} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.714028] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d63675-e242-436c-83ba-fe06feda62d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.734792] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 1b176c5d-e77c-410b-b282-b7bba65359a9/1b176c5d-e77c-410b-b282-b7bba65359a9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.739191] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f039202a-d9ba-4d76-a3bd-bba3a4204589 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.755449] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 707.755449] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523a3a55-41e8-0683-ad84-f1515a63f386" [ 707.755449] env[68638]: _type = "Task" [ 707.755449] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.760130] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 707.760130] env[68638]: value = "task-2833332" [ 707.760130] env[68638]: _type = "Task" [ 707.760130] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.766848] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523a3a55-41e8-0683-ad84-f1515a63f386, 'name': SearchDatastore_Task, 'duration_secs': 0.008313} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.767476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.767736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 20f2c343-1f32-4c36-b4a9-8f009b6ac326/20f2c343-1f32-4c36-b4a9-8f009b6ac326.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.770386] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0251da15-5203-4345-9fba-87986ba3f605 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.776050] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833332, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.780350] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 707.780350] env[68638]: value = "task-2833333" [ 707.780350] env[68638]: _type = "Task" [ 707.780350] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.791140] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.809176] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task} progress is 93%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.827030] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Successfully created port: db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 707.926646] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 708.050900] env[68638]: INFO nova.compute.manager [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Took 46.93 seconds to build instance. [ 708.125510] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e06634d-a64c-4e4e-98e1-70812b074000 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.136533] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e49694e-917d-4a08-bd09-b3f6862f41af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.170021] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3878991f-6868-48b8-8f29-eef43148e988 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.181744] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d13026-60f6-4768-bd5e-71f237dd6ce2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.185993] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833331, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.197144] env[68638]: DEBUG nova.compute.provider_tree [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.282028] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833332, 'name': ReconfigVM_Task, 'duration_secs': 0.512883} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.282028] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 1b176c5d-e77c-410b-b282-b7bba65359a9/1b176c5d-e77c-410b-b282-b7bba65359a9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.282028] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75962325-4513-43f6-a694-f3c90e5ff6f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.296716] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833333, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.298442] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 708.298442] env[68638]: value = "task-2833334" [ 708.298442] env[68638]: _type = "Task" [ 708.298442] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.312976] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.315494] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833334, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.554028] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6e04aec-2c76-41dc-a103-af560515c5c9 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.783s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.680624] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51234} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.680934] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.681203] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.681522] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbfdd618-c16a-4f4c-a00b-b2e386b18eb7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.687895] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 708.687895] env[68638]: value = "task-2833335" [ 708.687895] env[68638]: _type = "Task" [ 708.687895] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.695665] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.700287] env[68638]: DEBUG nova.scheduler.client.report [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 708.792727] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805383} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.793020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 20f2c343-1f32-4c36-b4a9-8f009b6ac326/20f2c343-1f32-4c36-b4a9-8f009b6ac326.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.793256] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.793543] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc953710-b0e9-43d1-90fe-0b6da42d2277 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.800231] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 708.800231] env[68638]: value = "task-2833336" [ 708.800231] env[68638]: _type = "Task" [ 708.800231] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.813690] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833334, 'name': Rename_Task, 'duration_secs': 0.229576} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.820051] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 708.820356] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.820580] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833336, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.820781] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b837ed51-a968-4adb-89a0-afa213f1d6b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.826900] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 708.826900] env[68638]: value = "task-2833337" [ 708.826900] env[68638]: _type = "Task" [ 708.826900] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.834898] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "interface-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.835154] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "interface-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.835467] env[68638]: DEBUG nova.objects.instance [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lazy-loading 'flavor' on Instance uuid ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 708.836755] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.939876] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 708.962768] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 708.963075] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.963264] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 708.963484] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.963655] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 708.963822] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 708.964060] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 708.964229] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 708.964401] env[68638]: DEBUG nova.virt.hardware [None 
req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 708.964576] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 708.964731] env[68638]: DEBUG nova.virt.hardware [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 708.965633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b274fff-5c34-42e6-b895-d4bdfbb369db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.973793] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c2d1a0-687f-44d0-beec-695c56c337ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.056661] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.199587] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062409} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.199879] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.201040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e845887c-80e3-4e62-8464-e847eba38f36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.206027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.286s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.217928] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.059s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.218193] env[68638]: DEBUG nova.objects.instance [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'resources' on Instance uuid f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.227952] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.228475] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca6d5902-0b2c-45c9-b370-3ecb9018768e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.245254] env[68638]: INFO nova.scheduler.client.report [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Deleted allocations for instance 4eb4360a-46a8-440b-b300-4724c3497ff2 [ 709.251877] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 709.251877] env[68638]: value = "task-2833338" [ 709.251877] env[68638]: _type = "Task" [ 709.251877] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.261675] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.323826] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.324106] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063914} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.324349] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.325176] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1a6b8a-06c3-471f-a823-3e2a352352ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.340817] env[68638]: DEBUG nova.objects.instance [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lazy-loading 'pci_requests' on Instance uuid ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.350592] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 20f2c343-1f32-4c36-b4a9-8f009b6ac326/20f2c343-1f32-4c36-b4a9-8f009b6ac326.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.355066] env[68638]: DEBUG nova.objects.base [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 709.355279] env[68638]: DEBUG nova.network.neutron [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 709.357108] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d040ac18-248b-4ae8-981c-09588d9f2bc1 
{{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.371819] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833337, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.377828] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 709.377828] env[68638]: value = "task-2833339" [ 709.377828] env[68638]: _type = "Task" [ 709.377828] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.386255] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.456802] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eab8e8ed-2da5-4c76-b809-3b5bba4d52c6 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "interface-ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 0.621s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.540033] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Successfully updated port: db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 709.577163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.757114] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4196f5c6-d46c-4234-b932-80b841671508 tempest-DeleteServersAdminTestJSON-2018211535 tempest-DeleteServersAdminTestJSON-2018211535-project-member] Lock "4eb4360a-46a8-440b-b300-4724c3497ff2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.515s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.764574] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833338, 'name': ReconfigVM_Task, 'duration_secs': 0.295123} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.764574] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Reconfigured VM instance instance-00000022 to attach disk [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38/f43dae1e-3442-450a-b9e8-3884504a2b38.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.765077] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23576fca-3b90-465d-afbc-5dfc3c8c4259 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.772853] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 709.772853] env[68638]: value = "task-2833340" [ 709.772853] env[68638]: _type = "Task" [ 709.772853] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.782324] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833340, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.814690] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833330, 'name': CloneVM_Task, 'duration_secs': 2.052931} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.815059] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Created linked-clone VM from snapshot [ 709.818098] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7848b9b9-9ceb-4957-bc2b-a93b3031dfcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.828027] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Uploading image 39920d52-dd3b-4dcd-9368-2d96d73cf30e {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 709.840297] env[68638]: DEBUG oslo_vmware.api [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833337, 'name': PowerOnVM_Task, 'duration_secs': 0.926033} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.840297] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 709.840460] env[68638]: INFO nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Took 8.64 seconds to spawn the instance on the hypervisor. [ 709.841028] env[68638]: DEBUG nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.841342] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9a68d7-f39e-4db4-9a1a-eadc6a13824d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.862827] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 709.862827] env[68638]: value = "vm-569848" [ 709.862827] env[68638]: _type = "VirtualMachine" [ 709.862827] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 709.864120] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c32c35df-17a4-43a9-81ad-dae25319cfce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.869523] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease: (returnval){ [ 709.869523] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2564-0bee-0dc7-d0c0-993f6d609bc7" [ 709.869523] env[68638]: _type = "HttpNfcLease" [ 709.869523] env[68638]: } obtained for exporting VM: (result){ [ 709.869523] env[68638]: value = "vm-569848" [ 709.869523] env[68638]: _type = "VirtualMachine" [ 709.869523] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 709.869863] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the lease: (returnval){ [ 709.869863] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2564-0bee-0dc7-d0c0-993f6d609bc7" [ 709.869863] env[68638]: _type = "HttpNfcLease" [ 709.869863] env[68638]: } to be ready. 
{{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 709.876415] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 709.876415] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2564-0bee-0dc7-d0c0-993f6d609bc7" [ 709.876415] env[68638]: _type = "HttpNfcLease" [ 709.876415] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 709.888288] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833339, 'name': ReconfigVM_Task, 'duration_secs': 0.277435} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.888598] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 20f2c343-1f32-4c36-b4a9-8f009b6ac326/20f2c343-1f32-4c36-b4a9-8f009b6ac326.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.889275] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c73ef24c-6a95-466c-8de0-1fb209002c6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.897654] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 709.897654] env[68638]: value = "task-2833342" [ 709.897654] env[68638]: _type = "Task" [ 709.897654] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.908981] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833342, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.919571] env[68638]: DEBUG nova.compute.manager [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Received event network-vif-plugged-db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 709.919571] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.919808] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.919976] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.920127] env[68638]: DEBUG nova.compute.manager [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] No waiting events found dispatching network-vif-plugged-db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.920311] env[68638]: WARNING nova.compute.manager [req-9d2e3362-0573-4dab-bffa-39296dd63ae9 req-bf89f7d7-9de6-44f3-8402-0d3c3167ecf1 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Received unexpected event network-vif-plugged-db439fed-d2ec-4e34-b43e-677c18b30fc9 for instance with vm_state building and task_state spawning. 
[ 710.044359] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.044461] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.044645] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.271649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb09534-26fc-42cc-8536-0f3d28913b8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.285241] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833340, 'name': Rename_Task, 'duration_secs': 0.152092} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.285581] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.286546] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af58c0f2-f0b7-433f-9f0d-b57ade8d91e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.290025] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48aa4854-25b5-43fe-a7a5-32d8487da04e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.321855] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1905dcad-6d9e-4420-b5d8-4017b3dd25c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.324479] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 710.324479] env[68638]: value = "task-2833343" [ 710.324479] env[68638]: _type = "Task" [ 710.324479] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.330827] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5815dd4d-999c-4788-a958-e29bc2c65b30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.337528] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833343, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.346876] env[68638]: DEBUG nova.compute.provider_tree [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.363332] env[68638]: INFO nova.compute.manager [None req-cb255e9a-2120-4679-aa4b-052041b038b2 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Took 46.38 seconds to build instance. [ 710.378165] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 710.378165] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2564-0bee-0dc7-d0c0-993f6d609bc7" [ 710.378165] env[68638]: _type = "HttpNfcLease" [ 710.378165] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 710.378456] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 710.378456] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2564-0bee-0dc7-d0c0-993f6d609bc7" [ 710.378456] env[68638]: _type = "HttpNfcLease" [ 710.378456] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 710.379499] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302e4446-8da5-4dac-a721-2bad3fbe7e34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.387052] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 710.387196] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk for reading. 
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 710.454925] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833342, 'name': Rename_Task, 'duration_secs': 0.131761} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.455538] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.455795] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34d17547-2508-40e8-81d3-427e18a4b9d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.461295] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 710.461295] env[68638]: value = "task-2833344" [ 710.461295] env[68638]: _type = "Task" [ 710.461295] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.471572] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.529301] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cb5b8c35-500e-49c5-8469-a352d5d91aa5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.578694] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 710.839534] env[68638]: DEBUG oslo_vmware.api [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833343, 'name': PowerOnVM_Task, 'duration_secs': 0.478699} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.840943] env[68638]: DEBUG nova.network.neutron [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Updating instance_info_cache with network_info: [{"id": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "address": "fa:16:3e:26:e2:9b", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb439fed-d2", "ovs_interfaceid": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.843481] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.843862] env[68638]: DEBUG nova.compute.manager [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.845775] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d816db-dc9c-48e4-9dbc-0bfe76fe5b9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.849707] env[68638]: DEBUG nova.scheduler.client.report [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.865356] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb255e9a-2120-4679-aa4b-052041b038b2 
tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.934s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.978788] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833344, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.345277] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.345597] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance network_info: |[{"id": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "address": "fa:16:3e:26:e2:9b", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb439fed-d2", "ovs_interfaceid": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 711.345956] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:e2:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db439fed-d2ec-4e34-b43e-677c18b30fc9', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.356285] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 711.357081] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.139s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.359511] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 711.360248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 30.540s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.361920] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad235f19-cd0c-4603-bfc1-26c5004133ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.384896] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 711.390238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.394608] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.394608] env[68638]: value = "task-2833345" [ 711.394608] env[68638]: _type = "Task" [ 711.394608] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.403499] env[68638]: INFO nova.scheduler.client.report [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted allocations for instance f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8 [ 711.409343] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833345, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.475239] env[68638]: DEBUG oslo_vmware.api [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833344, 'name': PowerOnVM_Task, 'duration_secs': 0.529186} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.475929] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.476330] env[68638]: INFO nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Took 5.30 seconds to spawn the instance on the hypervisor. [ 711.476848] env[68638]: DEBUG nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.478613] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554287df-9421-42ab-a624-c8b574761fce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.643256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.643753] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.644153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.645596] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.646174] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.649164] env[68638]: INFO nova.compute.manager [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Terminating instance [ 711.907096] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833345, 'name': CreateVM_Task, 'duration_secs': 0.494868} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.907096] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.907671] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.907831] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.908164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 711.908425] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1070de77-4d21-4bc0-91be-af8b0ac7df40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.913630] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 711.913630] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f617e5-182c-92bc-7f23-4f826fa8b590" [ 711.913630] env[68638]: _type = "Task" [ 711.913630] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.920845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.921790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-021d09f5-1187-4b81-871b-012dc8b45a69 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.328s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.926043] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f617e5-182c-92bc-7f23-4f826fa8b590, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.000848] env[68638]: INFO nova.compute.manager [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Took 38.43 seconds to build instance. [ 712.008924] env[68638]: DEBUG nova.compute.manager [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Received event network-changed-db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 712.009245] env[68638]: DEBUG nova.compute.manager [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Refreshing instance network info cache due to event network-changed-db439fed-d2ec-4e34-b43e-677c18b30fc9. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 712.009964] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Acquiring lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.009964] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Acquired lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.009964] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Refreshing network info cache for port db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.154027] env[68638]: DEBUG nova.compute.manager [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 712.154503] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.155933] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff14dbb-1112-4631-ba0d-679be7067d54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.166069] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.166069] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b059167-c6ee-4d11-91da-1a24ae853c8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.172511] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 712.172511] env[68638]: value = "task-2833346" [ 712.172511] env[68638]: _type = "Task" [ 712.172511] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.181678] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833346, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.368374] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "f43dae1e-3442-450a-b9e8-3884504a2b38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.368707] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.368925] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "f43dae1e-3442-450a-b9e8-3884504a2b38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.369219] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.369388] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.375139] env[68638]: INFO nova.compute.manager [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Terminating instance [ 712.425658] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f617e5-182c-92bc-7f23-4f826fa8b590, 'name': SearchDatastore_Task, 'duration_secs': 0.014608} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.426057] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.426305] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.426575] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.426776] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.426925] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 712.427643] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1785e7c-59a0-453a-8334-10698b023b94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.430511] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e3cf739a-3104-473d-af66-d9974ed1a222 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 712.430654] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c80895d5-1a59-4779-9da9-9aeec10bc395 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.430775] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 14772ba8-bde2-42ef-9a37-df876c8af321 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.430896] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.430997] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431139] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431248] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1946baab-bb48-4138-8db6-1f530e432c3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431355] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7617a7b1-3b21-4d38-b090-1d35bc74637b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431466] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431594] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance be761cf1-0949-42c0-8a38-58af33113a03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431725] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1eee31b7-db8b-4765-8cc2-4273717ef86e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431836] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ac0141c2-aef6-4edf-913a-d4a41b502c10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.431942] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432064] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432198] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance aaf0185b-1a85-4e0e-afb1-55e9e2417d76 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 712.432310] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432455] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7b0b6eec-4681-4926-ad3f-5572e022a467 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 712.432580] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 5a28d684-584b-4e13-9910-183119ce5d37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432695] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance f43dae1e-3442-450a-b9e8-3884504a2b38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432804] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.432956] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1b176c5d-e77c-410b-b282-b7bba65359a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.433067] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 20f2c343-1f32-4c36-b4a9-8f009b6ac326 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.433177] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 24982641-40ec-4fab-8385-1bc9dea6ade1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 712.441805] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 712.442894] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 712.443909] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f2e889a-0e01-4b4b-bda6-23747a64bb07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.451440] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 712.451440] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52171bee-c04f-a416-e558-179f537a3c8e" [ 712.451440] env[68638]: _type = "Task" [ 712.451440] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.463087] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52171bee-c04f-a416-e558-179f537a3c8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.503041] env[68638]: DEBUG oslo_concurrency.lockutils [None req-753b9d12-234f-457c-93fc-978c48131b5d tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.322s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.624872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.625275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.625623] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.626297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.626297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.629203] env[68638]: INFO nova.compute.manager [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] 
[instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Terminating instance [ 712.685771] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833346, 'name': PowerOffVM_Task, 'duration_secs': 0.283719} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.685771] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 712.685771] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 712.686011] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eec92718-0504-448f-b5fc-a2c010913752 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.751147] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 712.751147] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 712.751147] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Deleting the datastore file [datastore1] ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 712.751147] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb503d12-11b2-44ba-b6d8-9fa5c72e87db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.760066] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for the task: (returnval){ [ 712.760066] env[68638]: value = "task-2833348" [ 712.760066] env[68638]: _type = "Task" [ 712.760066] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.769032] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.840998] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Updated VIF entry in instance network info cache for port db439fed-d2ec-4e34-b43e-677c18b30fc9. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 712.841930] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Updating instance_info_cache with network_info: [{"id": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "address": "fa:16:3e:26:e2:9b", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb439fed-d2", "ovs_interfaceid": "db439fed-d2ec-4e34-b43e-677c18b30fc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.883850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "refresh_cache-f43dae1e-3442-450a-b9e8-3884504a2b38" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.884111] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquired lock "refresh_cache-f43dae1e-3442-450a-b9e8-3884504a2b38" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.884334] env[68638]: DEBUG nova.network.neutron [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 712.936997] env[68638]: DEBUG 
nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance b9736ec5-6332-4202-95d6-a3cd1d1f11d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 712.964317] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52171bee-c04f-a416-e558-179f537a3c8e, 'name': SearchDatastore_Task, 'duration_secs': 0.012998} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.964947] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49051400-941e-46f7-8b0f-5ffd6a78db2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.971094] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 712.971094] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52033fb7-4362-63fe-29ff-7113efdbb56c" [ 712.971094] env[68638]: _type = "Task" [ 712.971094] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.981797] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52033fb7-4362-63fe-29ff-7113efdbb56c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.005431] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.138842] env[68638]: DEBUG nova.compute.manager [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.139023] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.140030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa00faf-296e-4210-9c1c-5a26a8a47eee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.148109] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.148109] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-709f1bee-3c12-41ae-b704-b3b6faa3e122 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.154937] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 713.154937] env[68638]: value = "task-2833349" [ 713.154937] env[68638]: _type = "Task" [ 713.154937] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.165371] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.272900] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "333d88b6-2182-4e9c-9430-058e67921828" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.272900] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.279045] env[68638]: DEBUG oslo_vmware.api [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Task: {'id': task-2833348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28596} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.279238] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.279357] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 713.279538] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.279708] env[68638]: INFO nova.compute.manager [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 713.279945] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 713.280159] env[68638]: DEBUG nova.compute.manager [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 713.280266] env[68638]: DEBUG nova.network.neutron [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 713.347144] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Releasing lock "refresh_cache-24982641-40ec-4fab-8385-1bc9dea6ade1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.348084] env[68638]: DEBUG nova.compute.manager [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Received event network-changed-bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 713.348084] env[68638]: DEBUG nova.compute.manager [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Refreshing instance network info cache due to event network-changed-bc37d458-421e-4ca1-a705-30c976b1fdbd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 713.348410] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Acquiring lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.348737] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Acquired lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.349053] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Refreshing network info cache for port bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.409402] env[68638]: DEBUG nova.network.neutron [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 713.441364] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 06a1a44f-35ee-45d2-9503-23468150b72f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 713.485065] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52033fb7-4362-63fe-29ff-7113efdbb56c, 'name': SearchDatastore_Task, 'duration_secs': 0.029117} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.485065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.485065] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 713.485065] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e878541d-8945-4ede-b4e6-b1bbaa37b2d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.486828] env[68638]: DEBUG nova.network.neutron [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.495671] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 713.495671] env[68638]: value = "task-2833350" [ 713.495671] env[68638]: _type = "Task" [ 713.495671] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.509774] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833350, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.537126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.641047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.641047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.665937] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833349, 'name': PowerOffVM_Task, 'duration_secs': 0.333762} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.666303] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.668753] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.668753] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9481aed-778c-480f-b4c8-8ae4d4ea825e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.794647] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.794920] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Deleting contents of the VM from datastore datastore2 {{(pid=68638) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.795183] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleting the datastore file [datastore2] 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.795499] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a199ca4-6112-48c6-9604-2753cf88f86f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.803285] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 713.803285] env[68638]: value = "task-2833352" [ 713.803285] env[68638]: _type = "Task" [ 713.803285] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.815309] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833352, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.945225] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 072be237-c51e-43d2-ad84-46122ef9f335 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 713.991738] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Releasing lock "refresh_cache-f43dae1e-3442-450a-b9e8-3884504a2b38" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.992286] env[68638]: DEBUG nova.compute.manager [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.992493] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.993460] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec23d21-044c-4dbe-990a-b982432d1dec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.009202] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833350, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.013553] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.013810] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.014122] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.014388] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de538272-02ba-4aca-9ac6-6ec8024458d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.024682] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 714.024682] env[68638]: value = "task-2833353" [ 714.024682] env[68638]: _type = "Task" [ 714.024682] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.037210] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833353, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.042706] env[68638]: DEBUG nova.compute.manager [req-a8a0ea63-fbb8-4115-936d-1ea7e7d4ab5c req-97df0551-8910-4719-96aa-0042a0e59574 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Received event network-vif-deleted-9a4ba23f-3a11-4f1f-b92e-8260b30fe959 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 714.042917] env[68638]: INFO nova.compute.manager [req-a8a0ea63-fbb8-4115-936d-1ea7e7d4ab5c req-97df0551-8910-4719-96aa-0042a0e59574 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Neutron deleted interface 9a4ba23f-3a11-4f1f-b92e-8260b30fe959; detaching it from the instance and deleting it from the info cache [ 714.043101] env[68638]: DEBUG nova.network.neutron [req-a8a0ea63-fbb8-4115-936d-1ea7e7d4ab5c req-97df0551-8910-4719-96aa-0042a0e59574 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.103476] env[68638]: DEBUG nova.network.neutron [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.165244] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updated VIF entry in instance network info cache for port bc37d458-421e-4ca1-a705-30c976b1fdbd. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.165528] env[68638]: DEBUG nova.network.neutron [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updating instance_info_cache with network_info: [{"id": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "address": "fa:16:3e:a6:47:0d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc37d458-42", "ovs_interfaceid": "bc37d458-421e-4ca1-a705-30c976b1fdbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.314138] env[68638]: DEBUG oslo_vmware.api [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 
tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464042} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.314392] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.314578] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.314711] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.314939] env[68638]: INFO nova.compute.manager [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Took 1.18 seconds to destroy the instance on the hypervisor. [ 714.315209] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.315708] env[68638]: DEBUG nova.compute.manager [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.315807] env[68638]: DEBUG nova.network.neutron [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.449218] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 5294e1b6-f34f-4f91-aa3e-e0276ad982ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 714.514373] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593123} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.514373] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 714.514373] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.514373] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50a5e5b6-f4b4-430f-9e01-2bbaa61e3734 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.521108] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 714.521108] env[68638]: value = "task-2833354" [ 714.521108] env[68638]: _type = "Task" [ 714.521108] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.533822] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833354, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.537014] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833353, 'name': PowerOffVM_Task, 'duration_secs': 0.190609} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.537313] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 714.537501] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 714.537760] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2116b79-abfa-49ec-ba5e-56279045db4b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.546697] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df965cb9-abb5-4faf-a911-ae532fe532bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.554815] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaf09d0-f36b-48de-96ae-a37dbd4c1a8c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.570472] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 714.570774] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 714.570978] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Deleting the datastore file [datastore1] f43dae1e-3442-450a-b9e8-3884504a2b38 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 714.571758] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c7b0071-b942-4f8f-a144-5c893035cc0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.577605] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for the task: (returnval){ [ 714.577605] env[68638]: value = "task-2833356" [ 714.577605] env[68638]: _type = "Task" [ 714.577605] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.598315] env[68638]: DEBUG nova.compute.manager [req-a8a0ea63-fbb8-4115-936d-1ea7e7d4ab5c req-97df0551-8910-4719-96aa-0042a0e59574 service nova] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Detach interface failed, port_id=9a4ba23f-3a11-4f1f-b92e-8260b30fe959, reason: Instance ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 714.601952] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.606509] env[68638]: INFO nova.compute.manager [-] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Took 1.33 seconds to deallocate network for instance. [ 714.668802] env[68638]: DEBUG oslo_concurrency.lockutils [req-4103e892-b144-4121-a608-bec5f6a00688 req-8300c893-3e3d-403e-b9ab-7106e850d557 service nova] Releasing lock "refresh_cache-1b176c5d-e77c-410b-b282-b7bba65359a9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.952668] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 8992f062-c28f-4ac8-8d0d-0c51c3784e88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 715.031313] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117263} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.031551] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.032381] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb86d336-7533-47f0-8d16-8f2be3b1a8d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.055974] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.055974] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-790b3b84-4992-4ecd-8ba7-421715d28530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.074568] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 715.074568] env[68638]: value = "task-2833357" [ 715.074568] env[68638]: _type = "Task" [ 715.074568] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.083960] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.089334] env[68638]: DEBUG oslo_vmware.api [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Task: {'id': task-2833356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200707} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.089626] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.089780] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.089969] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.090150] env[68638]: INFO nova.compute.manager [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Took 1.10 seconds to destroy the instance on the hypervisor. [ 715.090381] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.090565] env[68638]: DEBUG nova.compute.manager [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 715.090660] env[68638]: DEBUG nova.network.neutron [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.106529] env[68638]: DEBUG nova.network.neutron [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.115035] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.163934] env[68638]: DEBUG nova.network.neutron [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.455989] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 94a33fcd-69b6-443b-9c86-5129e30b5b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 715.586342] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833357, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.609357] env[68638]: DEBUG nova.network.neutron [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.667087] env[68638]: INFO nova.compute.manager [-] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Took 1.35 seconds to deallocate network for instance. [ 715.959752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2450602a-fde7-4a65-b7a2-be4195077758 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 716.066084] env[68638]: DEBUG nova.compute.manager [req-bcb21886-bc65-4ce3-9d26-0f04e9eb8fbe req-9df39832-5adf-4d75-bbc3-cddaa581b648 service nova] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Received event network-vif-deleted-67d928ea-035b-4725-a33a-b0a2a24e0af4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 716.085755] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833357, 'name': ReconfigVM_Task, 'duration_secs': 0.525032} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.086060] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 716.086895] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94d7451c-0367-4107-a531-c0807c9561ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.093268] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 716.093268] env[68638]: value = "task-2833358" [ 716.093268] env[68638]: _type = "Task" [ 716.093268] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.101082] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833358, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.112352] env[68638]: INFO nova.compute.manager [-] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Took 1.02 seconds to deallocate network for instance. [ 716.174204] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.464324] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a3b06e32-2670-4381-bb91-4597bfcabaa6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 716.604249] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833358, 'name': Rename_Task, 'duration_secs': 0.29347} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.604572] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 716.604864] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-417a892e-133d-4a9c-8f71-c9202188baf2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.612244] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 716.612244] env[68638]: value = "task-2833359" [ 716.612244] env[68638]: _type = "Task" [ 716.612244] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.620703] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.620988] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833359, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.968071] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a09c4492-34fd-4010-b547-bfb5b61f252d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 717.123348] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833359, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.475060] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 039edcf8-7908-4be4-8bd3-0b55545b6f7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 717.623375] env[68638]: DEBUG oslo_vmware.api [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833359, 'name': PowerOnVM_Task, 'duration_secs': 0.607292} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.623758] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 717.623874] env[68638]: INFO nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Took 8.68 seconds to spawn the instance on the hypervisor. [ 717.624062] env[68638]: DEBUG nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 717.624862] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62d1bc0-b5ab-4275-ab9d-14f1d87b16e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.977884] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance da306fdd-a5b4-4275-a482-f77cc008d780 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 718.142556] env[68638]: INFO nova.compute.manager [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Took 42.14 seconds to build instance. [ 718.484792] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance bb86aabd-129d-4c14-9db1-6676a5e7b9fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 718.644411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0dd9d5-234d-48cd-b43c-65eb3728e86c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.854s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.664499] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.664836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.665071] env[68638]: DEBUG nova.compute.manager [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.666453] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9f0972-4d50-4458-ac4e-afbf05a4175a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.675129] env[68638]: DEBUG nova.compute.manager [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 718.675866] env[68638]: DEBUG nova.objects.instance [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'flavor' on Instance uuid 24982641-40ec-4fab-8385-1bc9dea6ade1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 718.987611] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 90c192bd-b823-414c-b793-260eacc9904f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.102103] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 719.103296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8882fc6f-44b3-463c-bbc6-fd6e5c131b3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.109970] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 719.110156] env[68638]: ERROR oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk due to incomplete transfer. [ 719.110382] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-be883210-8c97-4247-af54-d2270ef6dbbb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.117154] env[68638]: DEBUG oslo_vmware.rw_handles [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a03d6-5fb7-b2b3-fa18-32f2d93403c1/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 719.117367] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Uploaded image 39920d52-dd3b-4dcd-9368-2d96d73cf30e to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 719.119625] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 719.119783] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8ad2a1eb-0fab-48a7-85e5-749f1a41332c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.125488] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 719.125488] env[68638]: value = "task-2833360" [ 719.125488] env[68638]: _type = "Task" [ 719.125488] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.133957] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833360, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.147583] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.490810] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 772af0c0-a8dd-4167-87bc-617a9d95b54d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.635271] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833360, 'name': Destroy_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.669778] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.683759] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 719.684093] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed510be6-bdad-4399-b0e7-b5fa9339819a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.690931] env[68638]: DEBUG oslo_vmware.api [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 719.690931] env[68638]: value = "task-2833361" [ 719.690931] env[68638]: _type = "Task" [ 719.690931] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.699106] env[68638]: DEBUG oslo_vmware.api [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.993789] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 809416da-af6c-429d-b4b2-5334768aa744 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.994439] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 719.994439] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 720.139411] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833360, 'name': Destroy_Task, 'duration_secs': 0.804364} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.139679] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Destroyed the VM [ 720.139911] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 720.140177] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-546161a0-cd5f-4aea-8e6b-a8a41673e8a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.151194] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 720.151194] env[68638]: value = "task-2833362" [ 720.151194] env[68638]: _type = "Task" [ 720.151194] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.156899] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833362, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.201928] env[68638]: DEBUG oslo_vmware.api [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833361, 'name': PowerOffVM_Task, 'duration_secs': 0.295158} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.204454] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.204695] env[68638]: DEBUG nova.compute.manager [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.205939] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06895e77-c926-49bb-bb20-34bf4e4f2960 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.233532] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "5a28d684-584b-4e13-9910-183119ce5d37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.233791] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.234030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "5a28d684-584b-4e13-9910-183119ce5d37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.234212] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.234380] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.236432] env[68638]: INFO nova.compute.manager [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Terminating instance [ 
720.478057] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5375a0-c4a1-40b9-a08d-ab51987cd093 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.487151] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e07bcf-4f01-4bfd-a5be-0c1ee67cf5d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.520153] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c06b92a-cd99-45b5-a964-f5d1743b63a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.527269] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba006197-cf11-4e14-bb50-ec7fffb153da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.541649] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.661526] env[68638]: DEBUG oslo_vmware.api [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833362, 'name': RemoveSnapshot_Task, 'duration_secs': 0.337048} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.662384] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 720.662699] env[68638]: INFO nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Took 14.96 seconds to snapshot the instance on the hypervisor. 
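The Rename_Task, PowerOnVM_Task, Destroy_Task and RemoveSnapshot_Task entries above all follow the same pattern: a vCenter task is submitted, then its state is polled until it reports success or error (the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines). The sketch below is a minimal, self-contained illustration of that polling loop only; fetch_state, TaskFailed and the simulated task are illustrative assumptions, not the oslo.vmware API, whose real implementation lives in oslo_vmware/api.py (wait_for_task / _poll_task).

# Minimal sketch of the task-polling loop reflected in the wait_for_task /
# _poll_task entries above. fetch_state and the simulated task below are
# illustrative stand-ins, not the oslo.vmware API.
import time
from typing import Callable


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(fetch_state: Callable[[], dict],
                  poll_interval: float = 0.5,
                  timeout: float = 60.0) -> dict:
    """Poll a task until it reaches 'success' or 'error'.

    fetch_state must return a dict like {'state': ..., 'progress': ...},
    mirroring the TaskInfo fields visible in the log ('progress is 0%',
    'completed successfully').
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_state()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "task failed"))
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # The log shows periodic 'progress is N%' lines between polls.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Simulated task that completes after three polls.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "running", "progress": 89},
        {"state": "success", "progress": 100},
    ])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))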
[ 720.713640] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.713916] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.720104] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2d539d3-de13-4c75-ae3a-0faf9f55c330 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.745025] env[68638]: DEBUG nova.compute.manager [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 720.745025] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.745025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7e5be2-3b98-4163-b281-4dd6745053b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.751701] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 720.752094] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac6ee6c6-bd91-45a4-ae15-f762dfda11ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.810037] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 720.810285] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Deleting contents of the VM from datastore 
datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 720.810500] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore2] 5a28d684-584b-4e13-9910-183119ce5d37 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 720.810759] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b93780f4-755d-4026-836e-41c0624dc996 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.818181] env[68638]: DEBUG oslo_vmware.api [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 720.818181] env[68638]: value = "task-2833364" [ 720.818181] env[68638]: _type = "Task" [ 720.818181] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.825975] env[68638]: DEBUG oslo_vmware.api [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.044970] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.167161] env[68638]: DEBUG nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance disappeared during snapshot {{(pid=68638) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 721.177586] env[68638]: DEBUG nova.compute.manager [None req-825e8804-39da-4a31-89ba-6098fec8a850 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image not found during clean up 39920d52-dd3b-4dcd-9368-2d96d73cf30e {{(pid=68638) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 721.266357] env[68638]: INFO nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Rebuilding instance [ 721.306696] env[68638]: DEBUG nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 721.307579] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f3b4df-5b7b-4b35-83a5-2f6556c96281 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.326149] env[68638]: DEBUG oslo_vmware.api [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18764} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.326401] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.327039] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.327039] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.327039] env[68638]: INFO nova.compute.manager [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Took 0.58 seconds to destroy the instance on the hypervisor. [ 721.327198] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.327346] env[68638]: DEBUG nova.compute.manager [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.327439] env[68638]: DEBUG nova.network.neutron [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 721.549715] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 721.550009] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.190s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.550315] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.551814] env[68638]: INFO nova.compute.claims [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.602803] env[68638]: DEBUG nova.compute.manager [req-488e9229-d632-4c41-b337-eb5c063555ea req-6e3996c3-d6d0-445a-8352-e67e0a7168b4 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Received event network-vif-deleted-945ff6d9-4999-47aa-b917-48298ca743df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 721.603322] env[68638]: INFO nova.compute.manager [req-488e9229-d632-4c41-b337-eb5c063555ea req-6e3996c3-d6d0-445a-8352-e67e0a7168b4 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Neutron deleted interface 945ff6d9-4999-47aa-b917-48298ca743df; detaching it from the instance and deleting it from the info cache [ 721.603518] env[68638]: DEBUG nova.network.neutron [req-488e9229-d632-4c41-b337-eb5c063555ea req-6e3996c3-d6d0-445a-8352-e67e0a7168b4 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.085352] env[68638]: DEBUG nova.network.neutron [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.106051] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9efdae9-5cf6-4ce4-a391-ed584b24e777 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.115184] env[68638]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb074e93-a89b-486a-95df-0672a72a2596 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.149157] env[68638]: DEBUG nova.compute.manager [req-488e9229-d632-4c41-b337-eb5c063555ea req-6e3996c3-d6d0-445a-8352-e67e0a7168b4 service nova] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Detach interface failed, port_id=945ff6d9-4999-47aa-b917-48298ca743df, reason: Instance 5a28d684-584b-4e13-9910-183119ce5d37 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 722.277070] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.277306] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.321027] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.321850] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e351fc2-f12d-4125-bf5e-984c1ae93e9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.328443] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 722.328443] env[68638]: value = "task-2833365" [ 722.328443] env[68638]: _type = "Task" [ 722.328443] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.335993] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.589524] env[68638]: INFO nova.compute.manager [-] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Took 1.26 seconds to deallocate network for instance. 
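The recurring 'Acquiring lock "compute_resources" ... / acquired ... waited Xs / released ... held Xs' lines above are emitted by the oslo.concurrency lockutils wrapper around the decorated method (the "inner .../lockutils.py" frames in the log). The example below is a hedged sketch of that decorator usage, assuming oslo.concurrency is installed as it is in this venv; only the "compute_resources" lock name is taken from the log, while the function and its body are placeholders.

# Hedged sketch of how the lockutils "Acquiring/acquired/released" entries
# above are produced: methods that touch shared resource-tracker state are
# wrapped with a synchronized decorator keyed on the same lock name, so
# concurrent callers serialize (hence the "waited 39.000s" entries).
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def update_usage_example(instance_uuid: str) -> None:
    # While this runs, any other caller decorated with the same lock name
    # blocks until the lock is released.
    print(f"updating usage for {instance_uuid}")


if __name__ == "__main__":
    update_usage_example("24982641-40ec-4fab-8385-1bc9dea6ade1")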
[ 722.718561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.718561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.718768] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.718843] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.719684] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.720816] env[68638]: INFO nova.compute.manager [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Terminating instance [ 722.780927] env[68638]: DEBUG nova.compute.utils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.841357] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 722.841625] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 
tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.842418] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce8b877-eb35-45d7-af2f-cd34c5302907 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.851135] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.851382] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bad10c3-226c-4255-bdca-eadb8fb93f37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.905452] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 722.905724] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 722.905958] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 722.906285] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e4e3239-da77-4d2c-a1a2-e1685fb6286c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.914176] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 722.914176] env[68638]: value = "task-2833367" [ 722.914176] env[68638]: _type = "Task" [ 722.914176] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.922043] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833367, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.053487] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e6f161-8b26-4a73-be7c-9972ccbc7001 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.060370] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd22461b-78a5-41c9-9d06-461989ff0209 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.089529] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991641e9-b20b-4ec8-bad8-902f246b7f44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.096311] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.097601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f218e7e6-f38e-4b13-bbc0-d4169d926ba7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.110551] env[68638]: DEBUG nova.compute.provider_tree [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.225245] env[68638]: DEBUG nova.compute.manager [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 723.225470] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.226651] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dddf9e-5c7b-429b-9bb1-990b87c930cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.236698] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 723.236918] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-844b37b0-f5fe-4e38-9394-627acf1a564f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.243030] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 723.243030] env[68638]: value = "task-2833368" [ 723.243030] env[68638]: _type = "Task" [ 723.243030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.251257] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833368, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.285237] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.329403] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.329403] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.424018] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124981} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.424614] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.425141] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 723.425474] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.614027] env[68638]: DEBUG nova.scheduler.client.report [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.753037] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833368, 'name': PowerOffVM_Task, 'duration_secs': 0.182991} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.753203] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 723.753448] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 723.753997] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c9fbd02-da6e-4819-9f4a-017c2e767b83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.819700] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.820009] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.820245] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleting the datastore file [datastore1] 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.820644] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4c1d5d7-627f-4ad7-8e94-1de64b472caa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.829984] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 723.829984] env[68638]: value = "task-2833370" [ 723.829984] env[68638]: _type = "Task" [ 723.829984] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.838043] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833370, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.121036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.569s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.121036] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.122894] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.781s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.124779] env[68638]: INFO nova.compute.claims [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.339364] env[68638]: DEBUG oslo_vmware.api [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170193} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.339638] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.339806] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 724.339981] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.340163] env[68638]: INFO nova.compute.manager [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 724.340504] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.340631] env[68638]: DEBUG nova.compute.manager [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 724.340729] env[68638]: DEBUG nova.network.neutron [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 724.364313] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.364601] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.364782] env[68638]: INFO nova.compute.manager [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Attaching volume 9f8ab47d-9d2e-4915-9bc1-95f0375f5a92 to /dev/sdb [ 724.405929] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8699ce8d-49c4-46ff-a5cd-03a0392c955f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.413182] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036d477a-fbc8-412c-be52-b480e6854a30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.426584] env[68638]: DEBUG nova.virt.block_device [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating existing volume attachment record: e5c8c698-efd6-493a-939b-0029712ae5c7 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 724.462817] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 724.463926] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 724.464132] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 724.464287] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 724.464555] env[68638]: DEBUG nova.virt.hardware [None 
req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 724.464792] env[68638]: DEBUG nova.virt.hardware [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 724.465887] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f290f82-ec97-43f5-a3ad-a82ef132b609 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.473665] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcc9175-2032-4c2c-89b2-6837142bf718 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.488900] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:e2:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db439fed-d2ec-4e34-b43e-677c18b30fc9', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.495067] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.495304] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.495509] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ab216b9-c526-4a26-951e-fded53bcca7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.514237] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.514237] env[68638]: value = "task-2833371" [ 724.514237] env[68638]: _type = "Task" [ 724.514237] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.522189] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833371, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.630623] env[68638]: DEBUG nova.compute.utils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 724.632802] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 724.898835] env[68638]: DEBUG nova.compute.manager [req-dd98f628-a13f-4f3f-8e3b-be310d16e88a req-a53906a1-4b2c-4285-8beb-243abca4da09 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Received event network-vif-deleted-173c13c1-a5ec-4a6b-98d8-e039626a047e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 724.899062] env[68638]: INFO nova.compute.manager [req-dd98f628-a13f-4f3f-8e3b-be310d16e88a req-a53906a1-4b2c-4285-8beb-243abca4da09 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Neutron deleted interface 173c13c1-a5ec-4a6b-98d8-e039626a047e; detaching it from the instance and deleting it from the info cache [ 724.899217] env[68638]: DEBUG nova.network.neutron [req-dd98f628-a13f-4f3f-8e3b-be310d16e88a req-a53906a1-4b2c-4285-8beb-243abca4da09 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.034164] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833371, 'name': CreateVM_Task, 'duration_secs': 0.330833} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.034473] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.035199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.035367] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.035717] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 725.035985] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-012bd292-70f9-41ae-8d92-fc8ccd740456 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.042239] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 725.042239] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5289adb0-4c89-e740-fc76-eb68a1f04f87" [ 725.042239] env[68638]: _type = "Task" [ 725.042239] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.052404] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5289adb0-4c89-e740-fc76-eb68a1f04f87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.135568] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.380034] env[68638]: DEBUG nova.network.neutron [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.402713] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31625691-60e2-4442-9b83-1f33ac93e247 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.415076] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7945c5a5-a5f5-4f2b-92bf-94b59932b7cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.450362] env[68638]: DEBUG nova.compute.manager [req-dd98f628-a13f-4f3f-8e3b-be310d16e88a req-a53906a1-4b2c-4285-8beb-243abca4da09 service nova] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Detach interface failed, port_id=173c13c1-a5ec-4a6b-98d8-e039626a047e, reason: Instance 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 725.555079] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5289adb0-4c89-e740-fc76-eb68a1f04f87, 'name': SearchDatastore_Task, 'duration_secs': 0.009542} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.555438] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.555716] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.555992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.556191] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.556587] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.556699] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb4230ff-73ea-462a-b168-e6bc1ac87530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.564545] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.564875] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.565522] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2d0d20f-b42b-4aa2-8244-14a910aae4b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.570525] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 725.570525] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52659f99-9b08-e8ed-7b7b-682599a7fe05" [ 725.570525] env[68638]: _type = "Task" [ 725.570525] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.581510] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52659f99-9b08-e8ed-7b7b-682599a7fe05, 'name': SearchDatastore_Task, 'duration_secs': 0.008219} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.582301] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ab7706b-3c18-4842-a4d4-c66d74d45b55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.589984] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 725.589984] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c6cc63-d527-242a-8549-905c7a4b65c9" [ 725.589984] env[68638]: _type = "Task" [ 725.589984] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.598536] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c6cc63-d527-242a-8549-905c7a4b65c9, 'name': SearchDatastore_Task, 'duration_secs': 0.008487} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.598794] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.599137] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.601674] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ff7d37b-955d-4b0f-87fe-54314c2f1d18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.608165] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 725.608165] env[68638]: value = "task-2833375" [ 725.608165] env[68638]: _type = "Task" [ 725.608165] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.618956] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833375, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.795048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58e3cb2-5410-4acb-b6c5-839ed23f2510 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.802681] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1134aa90-ae2d-434c-8673-2053edb76dd6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.835685] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b16657-9d93-41b2-a642-7235c1abd2fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.843122] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b56527-c36a-4af7-b065-b5680ace3fea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.856213] env[68638]: DEBUG nova.compute.provider_tree [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.884500] env[68638]: INFO nova.compute.manager [-] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Took 1.54 seconds to deallocate network for instance. [ 726.123339] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833375, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.149202] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.191394] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.191664] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.191861] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.192126] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.192370] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.192657] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.192994] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.193332] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.193636] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd 
tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.193918] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.194257] env[68638]: DEBUG nova.virt.hardware [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.195756] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6c0f0d-f268-4df9-b9b9-e796535e270f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.210153] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bda7ac-e193-4e6c-b69b-cabbc5c74806 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.233719] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.244360] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.244943] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 726.245376] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0117675-4639-463d-8d31-b911a8808e4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.274039] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.274039] env[68638]: value = "task-2833376" [ 726.274039] env[68638]: _type = "Task" [ 726.274039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.287771] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833376, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.362577] env[68638]: DEBUG nova.scheduler.client.report [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.391146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.618819] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.918073} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.619143] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.619371] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.619627] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e0f8120-e5ee-4c59-9f5e-b544e8de8d98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.625522] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 726.625522] env[68638]: value = "task-2833377" [ 726.625522] env[68638]: _type = "Task" [ 726.625522] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.632774] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833377, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.783301] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833376, 'name': CreateVM_Task, 'duration_secs': 0.438308} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.783471] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.783944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.784124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.784434] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 726.784680] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd1f971f-2c3a-44e7-b4ae-ebf328ca927d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.790099] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 726.790099] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5254dfd0-71f2-fe1f-8a22-b31afc08b539" [ 726.790099] env[68638]: _type = "Task" [ 726.790099] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.797799] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254dfd0-71f2-fe1f-8a22-b31afc08b539, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.870973] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.871466] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.874231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.106s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.875706] env[68638]: INFO nova.compute.claims [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.134798] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833377, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069142} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.137161] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 727.137161] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982268c2-9ce5-4909-a22e-72363fa29409 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.157523] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.157807] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ef5650e-f9d9-47f1-a36c-5a6deb1bbefb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.178148] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 727.178148] env[68638]: value = "task-2833379" [ 727.178148] env[68638]: _type = "Task" [ 727.178148] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.187394] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833379, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.299548] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254dfd0-71f2-fe1f-8a22-b31afc08b539, 'name': SearchDatastore_Task, 'duration_secs': 0.009119} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.299888] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.300151] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.300465] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.300556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.300709] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.300966] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33664bbf-f069-43c6-a60f-25d90bfe2c20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.310790] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.311078] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.312201] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-126924fe-9876-4c59-8255-3a8b185f26a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.317512] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 727.317512] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52255dcb-7912-6ab9-6f2c-fdc2f66886e0" [ 727.317512] env[68638]: _type = "Task" [ 727.317512] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.325698] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52255dcb-7912-6ab9-6f2c-fdc2f66886e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.383623] env[68638]: DEBUG nova.compute.utils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.385277] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 727.688278] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833379, 'name': ReconfigVM_Task, 'duration_secs': 0.278583} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.688568] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1/24982641-40ec-4fab-8385-1bc9dea6ade1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.689323] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-918366a0-6959-416b-b371-c6d01c539d63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.695715] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 727.695715] env[68638]: value = "task-2833380" [ 727.695715] env[68638]: _type = "Task" [ 727.695715] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.702980] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833380, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.828088] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52255dcb-7912-6ab9-6f2c-fdc2f66886e0, 'name': SearchDatastore_Task, 'duration_secs': 0.012278} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.828933] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88e4e74a-d559-4630-b345-b2f965458aa1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.834039] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 727.834039] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52371a6d-1465-1ed3-dfa4-f5884be5fe7e" [ 727.834039] env[68638]: _type = "Task" [ 727.834039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.841177] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52371a6d-1465-1ed3-dfa4-f5884be5fe7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.888871] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 728.206365] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833380, 'name': Rename_Task, 'duration_secs': 0.137984} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.208817] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 728.209236] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ca9be72-01b2-49c5-a8a6-b599ca1c0b07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.215819] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 728.215819] env[68638]: value = "task-2833381" [ 728.215819] env[68638]: _type = "Task" [ 728.215819] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.226339] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.344695] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52371a6d-1465-1ed3-dfa4-f5884be5fe7e, 'name': SearchDatastore_Task, 'duration_secs': 0.008668} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.347124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.347389] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 728.348028] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2d67fc8-7274-49f1-bde3-2730144707c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.354252] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 728.354252] env[68638]: value = "task-2833382" [ 728.354252] env[68638]: _type = "Task" [ 728.354252] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.364394] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.397032] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4867b3-2e9f-4199-aa1a-08d781161a47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.405029] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6182583-c10d-44e7-a9cd-068325da80ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.437965] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee77e8d2-ee41-4aba-b17b-ec8458374973 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.445894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7a1040-587d-4680-8068-6a1c41c033a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.460034] env[68638]: DEBUG nova.compute.provider_tree [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.726112] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833381, 'name': PowerOnVM_Task, 'duration_secs': 0.470343} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.726414] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.726647] env[68638]: DEBUG nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.727833] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b40624-ba16-44ca-85c9-e44ad8091fa2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.863804] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486583} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.863804] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 728.864212] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.864212] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-427d1366-e5c8-46f0-9f88-3e7d042c9ebb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.869259] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 728.869259] env[68638]: value = "task-2833383" [ 728.869259] env[68638]: _type = "Task" [ 728.869259] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.876589] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833383, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.900875] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.927537] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.927839] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.928119] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.928357] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.928534] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.928687] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.928894] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.929076] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 728.929277] env[68638]: DEBUG nova.virt.hardware [None 
req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.929543] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.929814] env[68638]: DEBUG nova.virt.hardware [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.930863] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc40203-881e-4665-98c3-6fcde61c5193 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.938678] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f885bce-4b0b-4a46-9f0b-e809916070e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.953080] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 728.957701] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Creating folder: Project (923d3eac3b0042fa8447ed9f450cb9b6). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 728.958017] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3905c81c-f081-419c-952c-3360233cdae1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.962719] env[68638]: DEBUG nova.scheduler.client.report [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.967879] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Created folder: Project (923d3eac3b0042fa8447ed9f450cb9b6) in parent group-v569734. [ 728.968110] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Creating folder: Instances. Parent ref: group-v569855. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 728.968292] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7bb7489-1e5c-43fb-b66f-d9f2e0f99b14 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.976532] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Created folder: Instances in parent group-v569855. [ 728.977176] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.978501] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 728.978501] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40128c53-30e2-450a-b1ef-3e6bcd9df81b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.992249] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 728.992522] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569853', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'name': 'volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac', 'attached_at': '', 'detached_at': '', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'serial': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 728.994766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383d3d20-b82e-4da5-8101-f69097d9d099 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.998647] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 728.998647] env[68638]: value = "task-2833386" [ 728.998647] env[68638]: _type = "Task" [ 728.998647] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.014258] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b800607a-0c0d-4255-82d3-c00c80776bcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.019901] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833386, 'name': CreateVM_Task} progress is 15%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.044043] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92/volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 729.044043] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-319df78e-e4c3-4f4f-a2c7-6d02fa4194e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.059339] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Waiting for the task: (returnval){ [ 729.059339] env[68638]: value = "task-2833387" [ 729.059339] env[68638]: _type = "Task" [ 729.059339] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.067164] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833387, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.241384] env[68638]: INFO nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] bringing vm to original state: 'stopped' [ 729.381248] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063452} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.382059] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 729.382735] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea634574-c4ae-4f38-89cb-23cbffbbcf50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.412372] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 729.412798] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eea2d67c-17f3-4bed-80a3-bbf877626022 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.438524] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 729.438524] env[68638]: value = "task-2833388" [ 729.438524] env[68638]: _type = "Task" [ 729.438524] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.446436] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833388, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.467500] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.468057] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 729.470978] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.233s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.474511] env[68638]: INFO nova.compute.claims [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.510572] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833386, 'name': CreateVM_Task, 'duration_secs': 0.266383} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.510751] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 729.511194] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.511365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.511692] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 729.512200] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4780f6a8-ea23-4474-9301-5c77f496e695 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.517213] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 729.517213] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520b4a48-10f5-4d0c-e7e4-1cd9617b2d0d" [ 729.517213] env[68638]: _type = "Task" [ 729.517213] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.524939] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520b4a48-10f5-4d0c-e7e4-1cd9617b2d0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.570867] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.948867] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833388, 'name': ReconfigVM_Task, 'duration_secs': 0.264963} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.949203] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Reconfigured VM instance instance-00000027 to attach disk [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.949773] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81fa2190-978d-485b-8be1-4fdf5855c19e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.955707] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 729.955707] env[68638]: value = "task-2833389" [ 729.955707] env[68638]: _type = "Task" [ 729.955707] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.963509] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833389, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.976826] env[68638]: DEBUG nova.compute.utils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 729.981154] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 729.981154] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.027859] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520b4a48-10f5-4d0c-e7e4-1cd9617b2d0d, 'name': SearchDatastore_Task, 'duration_secs': 0.009931} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.028167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.028387] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.028612] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.028757] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.028933] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.029201] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2543f0e2-c4f3-4645-9d97-adffbb73b253 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.035101] env[68638]: DEBUG nova.policy [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1f3c7982b2c4f7595c53767e752512d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '82f81cb755f54484a45c7732db68c4a7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.037666] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.037840] env[68638]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 730.038546] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffcb37dd-1067-4baa-9820-280441661ef0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.043548] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 730.043548] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52072cac-8e3f-8871-b649-a54385bc7323" [ 730.043548] env[68638]: _type = "Task" [ 730.043548] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.051570] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52072cac-8e3f-8871-b649-a54385bc7323, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.068179] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833387, 'name': ReconfigVM_Task, 'duration_secs': 0.604813} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.068449] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfigured VM instance instance-0000000c to attach disk [datastore2] volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92/volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.073288] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-916c6e23-276d-4741-aebc-ed8cd254134f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.086953] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Waiting for the task: (returnval){ [ 730.086953] env[68638]: value = "task-2833390" [ 730.086953] env[68638]: _type = "Task" [ 730.086953] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.094774] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833390, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.253011] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.253011] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.253011] env[68638]: DEBUG nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.255727] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85457b0-496d-4c47-80ae-ca392a8ebff8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.260331] env[68638]: DEBUG nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 730.308927] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Successfully created port: f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.468660] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833389, 'name': Rename_Task, 'duration_secs': 0.12639} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.468851] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.469563] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e8b1fc7-a3c3-469c-8a48-bd78dccd9dd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.475274] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 730.475274] env[68638]: value = "task-2833391" [ 730.475274] env[68638]: _type = "Task" [ 730.475274] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.483584] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 730.489221] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.554119] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52072cac-8e3f-8871-b649-a54385bc7323, 'name': SearchDatastore_Task, 'duration_secs': 0.008387} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.559089] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ed6545c-1d31-4bff-95ee-d6aca00a94e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.566244] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 730.566244] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5224dbe1-c161-84d9-6af5-cda198896d92" [ 730.566244] env[68638]: _type = "Task" [ 730.566244] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.573020] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5224dbe1-c161-84d9-6af5-cda198896d92, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.601935] env[68638]: DEBUG oslo_vmware.api [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833390, 'name': ReconfigVM_Task, 'duration_secs': 0.266626} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.602171] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569853', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'name': 'volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac', 'attached_at': '', 'detached_at': '', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'serial': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 730.763736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 730.764206] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6865a6a-eb56-4162-af31-5c2b29b8c5c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.773695] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 730.773695] env[68638]: value = "task-2833392" [ 730.773695] env[68638]: _type = "Task" [ 730.773695] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.785590] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.985232] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833391, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.074643] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5224dbe1-c161-84d9-6af5-cda198896d92, 'name': SearchDatastore_Task, 'duration_secs': 0.009971} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.077134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.077393] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 731.077818] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edfdc3bb-aeac-4936-baab-c3d2d8b3e3f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.084468] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 731.084468] env[68638]: value = "task-2833393" [ 731.084468] env[68638]: _type = "Task" [ 731.084468] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.094601] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833393, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.150386] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc80851d-2403-4c6b-82ef-8e28d4a07f28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.157346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7826e9-ae7c-4e51-b3d7-a81e7dbc6338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.191344] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b6e3bd-2ed1-4ad2-b94d-eb67b9da3ade {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.200440] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5064a08e-87b9-4793-9c2c-a42deb2ea8c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.218492] env[68638]: DEBUG nova.compute.provider_tree [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.285076] env[68638]: DEBUG oslo_vmware.api [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833392, 'name': PowerOffVM_Task, 'duration_secs': 0.201719} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.285386] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.285632] env[68638]: DEBUG nova.compute.manager [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.286522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcf4e2d-7890-47c6-b4f6-b6cc909eac19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.488558] env[68638]: DEBUG oslo_vmware.api [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833391, 'name': PowerOnVM_Task, 'duration_secs': 0.525917} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.488558] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.488750] env[68638]: INFO nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Took 5.34 seconds to spawn the instance on the hypervisor. [ 731.488923] env[68638]: DEBUG nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.489781] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d403d09-5716-4a35-b9fe-7a0c2e9d4fc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.493463] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 731.525750] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 731.526021] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.526183] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 731.526367] env[68638]: DEBUG nova.virt.hardware [None 
req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.526514] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 731.526656] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 731.526864] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 731.527035] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 731.527200] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 731.527367] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 731.527585] env[68638]: DEBUG nova.virt.hardware [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 731.528498] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cafba9e-b0df-4f3d-9784-bf6463f2fc35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.537623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b265a8d9-e6f1-417b-a853-67ac17789b3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.593606] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833393, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.479238} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.593857] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.594088] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.594345] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86bd3553-770a-490e-9ffa-6faaf792e026 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.600308] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 731.600308] env[68638]: value = "task-2833394" [ 731.600308] env[68638]: _type = "Task" [ 731.600308] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.607690] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833394, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.691185] env[68638]: DEBUG nova.objects.instance [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lazy-loading 'flavor' on Instance uuid ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 731.722052] env[68638]: DEBUG nova.scheduler.client.report [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.800735] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.876678] env[68638]: DEBUG nova.compute.manager [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Received event network-vif-plugged-f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 731.876678] env[68638]: DEBUG oslo_concurrency.lockutils [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] Acquiring lock "072be237-c51e-43d2-ad84-46122ef9f335-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.876678] env[68638]: DEBUG oslo_concurrency.lockutils [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] Lock "072be237-c51e-43d2-ad84-46122ef9f335-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.876678] env[68638]: DEBUG oslo_concurrency.lockutils [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] Lock "072be237-c51e-43d2-ad84-46122ef9f335-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.876678] env[68638]: DEBUG nova.compute.manager [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] No waiting events found dispatching network-vif-plugged-f312d14a-da0d-42ec-a0a3-2f652c818f26 
{{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 731.876678] env[68638]: WARNING nova.compute.manager [req-f80be398-55ea-4eea-bad5-058656717f83 req-8475d155-1464-4916-8b13-f205025c4cde service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Received unexpected event network-vif-plugged-f312d14a-da0d-42ec-a0a3-2f652c818f26 for instance with vm_state building and task_state spawning. [ 731.936038] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Successfully updated port: f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.011850] env[68638]: INFO nova.compute.manager [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Took 49.48 seconds to build instance. [ 732.110799] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08064} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.111071] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 732.111862] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d42d86-3b24-491f-b2d1-81d1aabf7e23 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.134038] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 732.134201] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-264b545b-7622-4135-83fa-3b221cf274d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.158405] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 732.158405] env[68638]: value = "task-2833395" [ 732.158405] env[68638]: _type = "Task" [ 732.158405] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.167313] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833395, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.196561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ccbba5f0-87c7-4a1c-b720-efdc853dcc89 tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.832s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.228030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.228030] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 732.230303] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.719s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.230497] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.232821] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.568s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.234274] env[68638]: INFO nova.compute.claims [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.259599] env[68638]: INFO nova.scheduler.client.report [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 
tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Deleted allocations for instance e3cf739a-3104-473d-af66-d9974ed1a222 [ 732.307626] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.439066] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.439214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.439456] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.513299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81bbeea4-54a8-41e4-8416-349d77878efd tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.642s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.653576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.654543] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.654789] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
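Editorial note, not part of the captured log: the repeated 'Acquiring lock "..." by "..."', 'acquired ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' DEBUG entries above are produced by oslo_concurrency.lockutils whenever Nova serializes work on a named lock (e.g. "compute_resources", "refresh_cache-<uuid>", "<instance-uuid>-events"). A minimal sketch of that pattern follows; the lock names and functions are illustrative examples, not Nova code.

# Illustrative sketch only. Requires oslo.concurrency; lock names below are hypothetical.
import logging

from oslo_concurrency import lockutils

# lockutils logs acquire/wait/hold times at DEBUG via the stdlib logging module,
# which is what produces the "Acquiring lock ... by ..." style entries above.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources', 'nova-')
def claim_resources():
    """Critical section; the decorator logs wait and hold times around it."""


def refresh_instance_cache(instance_uuid):
    # Context-manager form, analogous to the 'refresh_cache-<uuid>' entries above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the network info cache while holding the lock


if __name__ == '__main__':
    claim_resources()
    refresh_instance_cache('072be237-c51e-43d2-ad84-46122ef9f335')
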
[ 732.655309] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.655406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.658616] env[68638]: INFO nova.compute.manager [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Terminating instance [ 732.678505] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.738309] env[68638]: DEBUG nova.compute.utils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 732.739889] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 732.740096] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.767193] env[68638]: DEBUG oslo_concurrency.lockutils [None req-760dbf9f-83c3-432c-be0d-8ff5fcc7baf6 tempest-FloatingIPsAssociationTestJSON-1456426366 tempest-FloatingIPsAssociationTestJSON-1456426366-project-member] Lock "e3cf739a-3104-473d-af66-d9974ed1a222" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.389s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.791884] env[68638]: DEBUG nova.policy [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2cbfef22a794206a443c59b9b5bc5b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b63d2e4e9fe24cc1aeb4b1569517ea20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 732.981679] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.024044] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 733.158545] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Successfully created port: 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.170611] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833395, 'name': ReconfigVM_Task, 'duration_secs': 0.599006} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.171421] env[68638]: DEBUG nova.compute.manager [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.171738] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.172135] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.173388] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bc5d00-6ad9-4915-a899-f72896436d26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.176568] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d05bbf99-62d4-4b4d-a3e1-28fcdab1750d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.182756] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 733.187030] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0611669-a509-44b2-bac4-dd767031188f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.187030] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 733.187030] env[68638]: value = "task-2833396" [ 733.187030] env[68638]: _type = "Task" [ 733.187030] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.189188] env[68638]: DEBUG nova.network.neutron [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Updating instance_info_cache with network_info: [{"id": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "address": "fa:16:3e:55:37:33", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf312d14a-da", "ovs_interfaceid": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.195964] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833396, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.243822] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.261724] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 733.262336] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 733.262336] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore2] 24982641-40ec-4fab-8385-1bc9dea6ade1 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 733.262695] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75356aba-d89b-4fa9-a681-e52c2e4eca8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.271103] env[68638]: DEBUG oslo_vmware.api [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 733.271103] env[68638]: value = "task-2833398" [ 733.271103] env[68638]: _type = "Task" [ 733.271103] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.283458] env[68638]: DEBUG oslo_vmware.api [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833398, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.298943] env[68638]: INFO nova.compute.manager [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Rebuilding instance [ 733.341463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.341719] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.347791] env[68638]: DEBUG nova.compute.manager [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 733.348704] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e332ade-4bfa-400d-a9a8-bdf390531205 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.553199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.695392] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.695693] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Instance network_info: |[{"id": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "address": "fa:16:3e:55:37:33", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf312d14a-da", "ovs_interfaceid": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 733.696064] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833396, 'name': Rename_Task, 'duration_secs': 0.132195} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.697353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:37:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f312d14a-da0d-42ec-a0a3-2f652c818f26', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.704557] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.704867] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.707942] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.708215] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a85a294c-baf1-48cb-a3d8-6a547e9be1ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.710339] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f2f1f36-c50f-4964-95b7-bd4b65b9f4e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.733788] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 733.733788] env[68638]: value = "task-2833399" [ 733.733788] env[68638]: _type = "Task" [ 733.733788] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.735120] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.735120] env[68638]: value = "task-2833400" [ 733.735120] env[68638]: _type = "Task" [ 733.735120] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.749204] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.752736] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833400, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.780932] env[68638]: DEBUG oslo_vmware.api [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833398, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136056} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.780932] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 733.780932] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 733.781924] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.781924] env[68638]: INFO nova.compute.manager [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Took 0.61 seconds to destroy the instance on the hypervisor. [ 733.781924] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.784209] env[68638]: DEBUG nova.compute.manager [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 733.784341] env[68638]: DEBUG nova.network.neutron [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 733.847341] env[68638]: INFO nova.compute.manager [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Detaching volume 9f8ab47d-9d2e-4915-9bc1-95f0375f5a92 [ 733.892849] env[68638]: INFO nova.virt.block_device [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Attempting to driver detach volume 9f8ab47d-9d2e-4915-9bc1-95f0375f5a92 from mountpoint /dev/sdb [ 733.893092] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 733.893322] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569853', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'name': 'volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac', 'attached_at': '', 'detached_at': '', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'serial': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 733.894581] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90354629-36b7-4dbb-b066-c44ad66bcfbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.899598] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6c3fb1-38a6-4d38-acd3-eaba52594f63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.906330] env[68638]: DEBUG nova.compute.manager [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Received event network-changed-f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 733.906537] env[68638]: DEBUG nova.compute.manager [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Refreshing instance network info cache due to event network-changed-f312d14a-da0d-42ec-a0a3-2f652c818f26. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 733.906830] env[68638]: DEBUG oslo_concurrency.lockutils [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] Acquiring lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.906980] env[68638]: DEBUG oslo_concurrency.lockutils [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] Acquired lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.907230] env[68638]: DEBUG nova.network.neutron [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Refreshing network info cache for port f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 733.931302] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfdf143-19b4-4ee4-b0d9-97fb9be1ac86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.940582] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc8ad84-68e7-4618-bb05-1ef32d449a38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.950017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06f0c8c-5e9c-4cd8-aa9b-38aa554d0d30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.982315] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2bacd3-0d1e-4b33-8970-a3ce8e857ba7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.005854] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116829c0-5209-4983-a39f-5b4dd0e39c86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.012477] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046a7954-c0ea-4d00-8535-d1b65c04cfc7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.030823] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] The volume has not been displaced from its original location: [datastore2] volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92/volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 734.036443] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfiguring VM instance instance-0000000c to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 734.038117] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-947b3f64-b1ae-4d63-b765-daef613d7593 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.060059] env[68638]: DEBUG nova.compute.provider_tree [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.066843] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Waiting for the task: (returnval){ [ 734.066843] env[68638]: value = "task-2833401" [ 734.066843] env[68638]: _type = "Task" [ 734.066843] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.078146] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833401, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.259430] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833399, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.264759] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833400, 'name': CreateVM_Task, 'duration_secs': 0.311958} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.268700] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.270230] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 734.275364] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.275527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.275927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 734.276665] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-218ad9c9-5989-4f8f-9fe6-d8ab182e189f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.283533] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 734.283533] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5258ea70-f341-2a5d-4fc6-d49da16aa94b" [ 734.283533] env[68638]: _type = "Task" [ 734.283533] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.296985] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5258ea70-f341-2a5d-4fc6-d49da16aa94b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.310861] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 734.311239] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.311488] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 734.311781] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.312030] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 734.312274] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 734.312597] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 734.312867] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 734.313179] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 734.313451] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 734.313735] env[68638]: DEBUG nova.virt.hardware [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 734.315317] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2233a7d-7f3d-4520-83cc-f547d97e968f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.327904] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85624e85-0d47-4499-8369-18e82b3b6cb9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.366191] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 734.367011] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dae589d-8c5f-4b01-89e3-2ff8c5098458 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.374607] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 734.374607] env[68638]: value = "task-2833402" [ 734.374607] env[68638]: _type = "Task" [ 734.374607] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.385180] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833402, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.393557] env[68638]: DEBUG nova.network.neutron [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Updated VIF entry in instance network info cache for port f312d14a-da0d-42ec-a0a3-2f652c818f26. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 734.394049] env[68638]: DEBUG nova.network.neutron [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Updating instance_info_cache with network_info: [{"id": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "address": "fa:16:3e:55:37:33", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf312d14a-da", "ovs_interfaceid": "f312d14a-da0d-42ec-a0a3-2f652c818f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.458235] env[68638]: DEBUG nova.compute.manager [req-ed99a0f9-c726-4ee8-80da-a82e31dd0993 req-f33e4137-77fa-4878-820a-6a327e50d8bd service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Received event network-vif-deleted-db439fed-d2ec-4e34-b43e-677c18b30fc9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 734.458377] env[68638]: INFO nova.compute.manager [req-ed99a0f9-c726-4ee8-80da-a82e31dd0993 req-f33e4137-77fa-4878-820a-6a327e50d8bd service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Neutron deleted interface db439fed-d2ec-4e34-b43e-677c18b30fc9; detaching it from the instance and deleting it from the info cache [ 734.458556] env[68638]: DEBUG nova.network.neutron [req-ed99a0f9-c726-4ee8-80da-a82e31dd0993 req-f33e4137-77fa-4878-820a-6a327e50d8bd service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.563636] env[68638]: DEBUG nova.scheduler.client.report [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 734.576809] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 
tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833401, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.752950] env[68638]: DEBUG oslo_vmware.api [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833399, 'name': PowerOnVM_Task, 'duration_secs': 0.628902} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.755485] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.755818] env[68638]: INFO nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Took 5.85 seconds to spawn the instance on the hypervisor. [ 734.756016] env[68638]: DEBUG nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 734.757424] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041c0bc7-b05f-4207-ad17-9f2b3b68ce44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.779923] env[68638]: DEBUG nova.network.neutron [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.799097] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5258ea70-f341-2a5d-4fc6-d49da16aa94b, 'name': SearchDatastore_Task, 'duration_secs': 0.010634} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.800263] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.800596] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.800864] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.801031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.801210] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.801719] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6487e1a0-f3c3-4aa1-b371-ead6767a341d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.810328] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.810490] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.813448] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eff6fa28-ad8b-4d64-9042-384365b2dd24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.817229] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 734.817229] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cdedc4-94a0-778c-de1f-233193737f2c" [ 734.817229] env[68638]: _type = "Task" [ 734.817229] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.826084] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cdedc4-94a0-778c-de1f-233193737f2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.888442] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833402, 'name': PowerOffVM_Task, 'duration_secs': 0.285411} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.888885] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.889250] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.890397] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a17118-57b8-4616-ba2f-eb3bc62ab216 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.899356] env[68638]: DEBUG oslo_concurrency.lockutils [req-b704a324-cbf9-4825-966b-e54f0bbd2967 req-8e47ec60-6827-47ae-b55e-1da81fb639f0 service nova] Releasing lock "refresh_cache-072be237-c51e-43d2-ad84-46122ef9f335" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.903958] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.904622] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f54d110-464e-4f5d-a64e-d79be4caeb98 
{{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.927424] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.927681] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.927867] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleting the datastore file [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.928146] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-022c6174-3d9d-4bc1-b4eb-b8c1f8c9fe78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.934549] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 734.934549] env[68638]: value = "task-2833404" [ 734.934549] env[68638]: _type = "Task" [ 734.934549] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.942443] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833404, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.961409] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-feedcdd4-baa2-48fe-8ec2-e3a2a93afa5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.970965] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b4b8b3-0d30-449d-a6de-e06cb9ddeff2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.005015] env[68638]: DEBUG nova.compute.manager [req-ed99a0f9-c726-4ee8-80da-a82e31dd0993 req-f33e4137-77fa-4878-820a-6a327e50d8bd service nova] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Detach interface failed, port_id=db439fed-d2ec-4e34-b43e-677c18b30fc9, reason: Instance 24982641-40ec-4fab-8385-1bc9dea6ade1 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 735.044130] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Successfully updated port: 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.072573] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.840s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.073147] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.075800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.248s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.077311] env[68638]: INFO nova.compute.claims [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.090824] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833401, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.282137] env[68638]: INFO nova.compute.manager [-] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Took 1.50 seconds to deallocate network for instance. [ 735.288276] env[68638]: INFO nova.compute.manager [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Took 40.99 seconds to build instance. [ 735.335415] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cdedc4-94a0-778c-de1f-233193737f2c, 'name': SearchDatastore_Task, 'duration_secs': 0.016932} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.336180] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6766e209-92ab-4dcf-a594-08440e468153 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.342200] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 735.342200] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524a5096-842b-6a55-7ec8-c3b600ab3e58" [ 735.342200] env[68638]: _type = "Task" [ 735.342200] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.349850] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524a5096-842b-6a55-7ec8-c3b600ab3e58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.444347] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32984} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.444624] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.444808] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 735.444988] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.546222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.546222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.546354] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 735.578778] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833401, 'name': ReconfigVM_Task, 'duration_secs': 1.229253} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.579078] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Reconfigured VM instance instance-0000000c to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 735.584026] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52338fe2-3ae0-4dc6-842d-2a844d4b327c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.596651] env[68638]: DEBUG nova.compute.utils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.598085] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.598259] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.605456] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Waiting for the task: (returnval){ [ 735.605456] env[68638]: value = "task-2833405" [ 735.605456] env[68638]: _type = "Task" [ 735.605456] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.614992] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833405, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.670126] env[68638]: DEBUG nova.policy [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d2be8827bf74d62b1f6245cdcfd9d8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3b1af81bc4cec877ef5a7e6999a7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 735.790654] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.791164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d45e2ca8-86db-4165-a1bd-0b1642f22ae7 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.121s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.857117] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524a5096-842b-6a55-7ec8-c3b600ab3e58, 'name': SearchDatastore_Task, 'duration_secs': 0.009459} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.857117] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.857253] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 072be237-c51e-43d2-ad84-46122ef9f335/072be237-c51e-43d2-ad84-46122ef9f335.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.857573] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa225d47-b2d7-4bdd-b341-f11b5d4b7d08 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.868295] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 735.868295] env[68638]: value = "task-2833406" [ 735.868295] env[68638]: _type = "Task" [ 735.868295] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.876684] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.086094] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Successfully created port: ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.104835] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.113176] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.122255] env[68638]: DEBUG oslo_vmware.api [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Task: {'id': task-2833405, 'name': ReconfigVM_Task, 'duration_secs': 0.140864} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.122255] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569853', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'name': 'volume-9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac', 'attached_at': '', 'detached_at': '', 'volume_id': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92', 'serial': '9f8ab47d-9d2e-4915-9bc1-95f0375f5a92'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 736.140038] env[68638]: DEBUG nova.compute.manager [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-vif-plugged-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 736.140038] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Acquiring lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.140384] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.140488] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.140907] env[68638]: DEBUG nova.compute.manager [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] No waiting events found dispatching network-vif-plugged-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 736.140907] env[68638]: WARNING nova.compute.manager [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received unexpected 
event network-vif-plugged-8407d492-d594-4996-8547-bfe5c27586e3 for instance with vm_state building and task_state spawning. [ 736.141124] env[68638]: DEBUG nova.compute.manager [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-changed-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 736.141335] env[68638]: DEBUG nova.compute.manager [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing instance network info cache due to event network-changed-8407d492-d594-4996-8547-bfe5c27586e3. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 736.141532] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.300023] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 736.384890] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507324} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.384890] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 072be237-c51e-43d2-ad84-46122ef9f335/072be237-c51e-43d2-ad84-46122ef9f335.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.385206] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.385297] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4e7b253-68bf-445a-9f4a-7653bfdd1fc3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.391665] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 736.391665] env[68638]: value = "task-2833407" [ 736.391665] env[68638]: _type = "Task" [ 736.391665] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.404279] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833407, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.420396] env[68638]: DEBUG nova.network.neutron [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 
tempest-ServerShowV247Test-2042402141-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 736.486052] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 736.486923] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 736.487270] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 736.487700] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 736.488030] env[68638]: DEBUG nova.virt.hardware [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 736.489039] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4749687a-7f9a-48fd-8ed6-a0a499f7b8de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.502307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a23a21-2d1c-4e63-b3e9-0d0c3e27eef0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.516618] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 
tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.523229] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 736.527459] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 736.527459] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c7ac439-b0db-4b18-8048-17a2ab041d0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.546224] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 736.546224] env[68638]: value = "task-2833408" [ 736.546224] env[68638]: _type = "Task" [ 736.546224] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.554194] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833408, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.716486] env[68638]: DEBUG nova.objects.instance [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lazy-loading 'flavor' on Instance uuid ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 736.819701] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.834498] env[68638]: INFO nova.compute.manager [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Rebuilding instance [ 736.838334] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5813bb4c-9f35-4337-8582-a32e4412d678 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.846111] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711f8bf4-a639-4587-8cd4-eb454d328449 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.882385] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f49cd7e-cf65-467d-ab3a-d63b38109859 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.900988] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a835c1-56e2-4b90-9692-413b6882960a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.910114] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094193} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.918132] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 736.918697] env[68638]: DEBUG nova.compute.provider_tree [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.922757] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2cfd6-a90e-48a0-a7be-ae3047ff1bc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.929882] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.929882] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Instance network_info: |[{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 736.929882] env[68638]: DEBUG nova.compute.manager [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 736.929882] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.929882] env[68638]: DEBUG nova.network.neutron [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.929882] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:8a:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8407d492-d594-4996-8547-bfe5c27586e3', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 736.937455] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Creating folder: Project (b63d2e4e9fe24cc1aeb4b1569517ea20). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.938669] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90cd7bc-d0f1-45bb-98d8-3ae4d770c733 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.953907] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f88c83c-653f-47c5-b06b-21803e099828 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.964561] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 072be237-c51e-43d2-ad84-46122ef9f335/072be237-c51e-43d2-ad84-46122ef9f335.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 736.965388] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8c9b1c6-20f7-46fa-a22e-723fc3e4f509 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.989361] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 736.989361] env[68638]: value = "task-2833410" [ 736.989361] env[68638]: _type = "Task" [ 736.989361] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.990743] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Created folder: Project (b63d2e4e9fe24cc1aeb4b1569517ea20) in parent group-v569734. [ 736.990934] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Creating folder: Instances. Parent ref: group-v569860. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 736.993998] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ac87c88-55e2-4b3d-8454-4995775beb54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.000628] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833410, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.003384] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Created folder: Instances in parent group-v569860. 
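The surrounding entries trace the oslo.vmware call-and-wait pattern: the driver invokes a vSphere *_Task method (ReconfigVM_Task, CreateVM_Task, later PowerOffVM_Task), logs "Waiting for the task: (returnval){ ... } to complete", and then polls it until "completed successfully". Below is a minimal illustrative sketch of that pattern using the public oslo.vmware session API; it is not code from this deployment, and the vCenter host, credentials, and the looked-up VM reference are placeholders invented for the example.

# Sketch of the invoke_api / wait_for_task pattern behind the
# "Invoking <object>.<Method>_Task" and "progress is N%" lines above.
# Host, credentials and the chosen VM are illustrative placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test',                  # placeholder vCenter endpoint
    'administrator@vsphere.local',      # placeholder username
    'secret',                           # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Retrieve a VirtualMachine managed-object reference to operate on.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 1, ['name'])
vm_ref = result.objects[0].obj

# *_Task methods return a Task reference; wait_for_task() polls it and
# emits the "Waiting for the task ... progress is N% ... completed
# successfully" log lines seen in this trace.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task finishes

The same wrapper is what produces the DEBUG lines from oslo_vmware/api.py:397 (wait_for_task) and api.py:434/444 (_poll_task) throughout this log.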
[ 737.003677] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.003944] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.004310] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-922ff74b-9ea5-4c14-8b71-79797cf8a874 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.027021] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.027021] env[68638]: value = "task-2833412" [ 737.027021] env[68638]: _type = "Task" [ 737.027021] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.034415] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833412, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.057310] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833408, 'name': CreateVM_Task, 'duration_secs': 0.305774} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.057491] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.057931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.058110] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.058439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 737.058704] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf041d22-2179-4c6b-92a1-026a22a13f57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.064598] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 
tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 737.064598] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521be2d4-9f68-78fc-9871-34ee9831a7e4" [ 737.064598] env[68638]: _type = "Task" [ 737.064598] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.072506] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521be2d4-9f68-78fc-9871-34ee9831a7e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.115619] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.144927] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.145204] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.145362] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.145540] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.145684] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.145830] env[68638]: DEBUG nova.virt.hardware [None 
req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.146047] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.146207] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.146375] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.146536] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.146707] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.147589] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cd2a01-0b38-49e3-91bb-8fa6130d8c35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.155687] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e25c0f-448d-49a0-9e54-e412646f2874 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.427436] env[68638]: DEBUG nova.scheduler.client.report [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.500388] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 
tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833410, 'name': ReconfigVM_Task, 'duration_secs': 0.29172} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.501375] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 072be237-c51e-43d2-ad84-46122ef9f335/072be237-c51e-43d2-ad84-46122ef9f335.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.501969] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fd622d9-aa2f-482c-8eee-753113eb5e3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.508972] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 737.508972] env[68638]: value = "task-2833413" [ 737.508972] env[68638]: _type = "Task" [ 737.508972] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.517228] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833413, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.532148] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833412, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.546939] env[68638]: DEBUG nova.network.neutron [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updated VIF entry in instance network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 737.547302] env[68638]: DEBUG nova.network.neutron [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.574397] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521be2d4-9f68-78fc-9871-34ee9831a7e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009353} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.574712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.574927] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.575175] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.575322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.579031] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.579031] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-045086d0-73de-4f18-a71c-e8bb63865900 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.584434] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.584613] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.585358] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2892882-1f14-4d85-aa82-4aaef48740a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.591234] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 737.591234] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f0f6bd-4a6f-c1b6-a8d5-2f7bf2ff582a" [ 737.591234] env[68638]: _type = "Task" [ 737.591234] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.598917] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0f6bd-4a6f-c1b6-a8d5-2f7bf2ff582a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.724123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6957260b-0c45-4fd9-ae28-95c919c2fe6f tempest-VolumesAssistedSnapshotsTest-2050338336 tempest-VolumesAssistedSnapshotsTest-2050338336-project-admin] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.382s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.782185] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Successfully updated port: ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.937673] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.861s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.938730] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 737.941738] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.893s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.942670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.945141] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.765s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.945389] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.947597] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.371s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.949039] env[68638]: INFO nova.compute.claims [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.980598] env[68638]: INFO nova.scheduler.client.report [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Deleted allocations for instance 7b0b6eec-4681-4926-ad3f-5572e022a467 [ 737.982832] env[68638]: INFO nova.scheduler.client.report [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleted allocations for instance aaf0185b-1a85-4e0e-afb1-55e9e2417d76 [ 737.996566] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 737.997888] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9a9ba54c-8574-4661-bdd1-09fecb770199 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.006417] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 738.006417] env[68638]: value = "task-2833415" [ 738.006417] env[68638]: _type = "Task" [ 738.006417] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.021976] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833413, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.022357] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.036433] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833412, 'name': CreateVM_Task, 'duration_secs': 0.644292} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.036647] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.037626] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.037868] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.038259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 738.038949] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9da83d6b-b509-4077-b082-64eecf5f2833 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.044226] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 
tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 738.044226] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e939c4-616f-bca1-5d41-36e62b36107c" [ 738.044226] env[68638]: _type = "Task" [ 738.044226] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.049572] env[68638]: DEBUG oslo_concurrency.lockutils [req-79b49df6-149b-4d03-98ca-92ad47297ea2 req-77cf5dde-04da-49b2-96df-df5f6c5f9aab service nova] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.053083] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e939c4-616f-bca1-5d41-36e62b36107c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.102538] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0f6bd-4a6f-c1b6-a8d5-2f7bf2ff582a, 'name': SearchDatastore_Task, 'duration_secs': 0.012706} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.103644] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a319daeb-0624-400f-87d9-35774b8a8e9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.109737] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 738.109737] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521a9d46-3a29-d778-ca73-230af303b9cd" [ 738.109737] env[68638]: _type = "Task" [ 738.109737] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.119920] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521a9d46-3a29-d778-ca73-230af303b9cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.168022] env[68638]: DEBUG nova.compute.manager [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Received event network-vif-plugged-ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.168217] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Acquiring lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.168293] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.168527] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.168628] env[68638]: DEBUG nova.compute.manager [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] No waiting events found dispatching network-vif-plugged-ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.168737] env[68638]: WARNING nova.compute.manager [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Received unexpected event network-vif-plugged-ababd3bc-a199-4001-b965-7ba88550ead8 for instance with vm_state building and task_state spawning. [ 738.168917] env[68638]: DEBUG nova.compute.manager [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Received event network-changed-ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.169383] env[68638]: DEBUG nova.compute.manager [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Refreshing instance network info cache due to event network-changed-ababd3bc-a199-4001-b965-7ba88550ead8. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 738.169666] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Acquiring lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.169849] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Acquired lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.170078] env[68638]: DEBUG nova.network.neutron [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Refreshing network info cache for port ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.282459] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.454694] env[68638]: DEBUG nova.compute.utils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 738.461591] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 738.461776] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 738.502334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-940ce388-89c3-4e43-a7b3-9f18ff96cabb tempest-ServerPasswordTestJSON-675353065 tempest-ServerPasswordTestJSON-675353065-project-member] Lock "7b0b6eec-4681-4926-ad3f-5572e022a467" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.305s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.503326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-57cbceb0-87d2-49f3-a643-29f4472c1207 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "aaf0185b-1a85-4e0e-afb1-55e9e2417d76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.718s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.531011] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833413, 'name': Rename_Task, 'duration_secs': 0.864879} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.531278] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833415, 'name': PowerOffVM_Task, 'duration_secs': 0.120723} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.532762] env[68638]: DEBUG nova.policy [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1d2be8827bf74d62b1f6245cdcfd9d8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dce3b1af81bc4cec877ef5a7e6999a7f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 738.534271] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.534536] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.535244] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.535773] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-468d1822-185a-4177-a99e-d573a4561e6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.538105] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e139e31f-54e3-47b2-9d57-633b951acb86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.545834] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.550417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b2e937d-d414-4e2c-8a5a-48152c82c79f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.552221] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 738.552221] env[68638]: value = "task-2833416" [ 738.552221] env[68638]: _type = "Task" [ 738.552221] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.567039] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e939c4-616f-bca1-5d41-36e62b36107c, 'name': SearchDatastore_Task, 'duration_secs': 0.010462} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.567039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.567039] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.567039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.568788] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833416, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.577623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.577623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.577623] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Deleting the datastore file [datastore2] 06a1a44f-35ee-45d2-9503-23468150b72f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.577623] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c6675fd-33ea-4dcf-9822-7cdd30d3f041 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.582965] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 738.582965] env[68638]: value = "task-2833418" [ 738.582965] env[68638]: _type = "Task" [ 738.582965] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.594507] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.620798] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521a9d46-3a29-d778-ca73-230af303b9cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009768} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.620951] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.621140] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.621455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.621603] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.622659] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3300949f-cffe-40aa-a8c9-423b9304b0b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.624666] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5469226c-3658-40b5-83e3-7070a7e1b5a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.632154] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 738.632154] env[68638]: value = "task-2833419" [ 738.632154] env[68638]: _type = "Task" [ 738.632154] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.639423] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.639632] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.640752] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e611ed4-216d-4cbf-9639-a1cfecfb2b21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.646389] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.650084] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 738.650084] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e7dc6e-5332-65ab-60a2-ffa3b03bd111" [ 738.650084] env[68638]: _type = "Task" [ 738.650084] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.657814] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e7dc6e-5332-65ab-60a2-ffa3b03bd111, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.746273] env[68638]: DEBUG nova.network.neutron [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.927283] env[68638]: DEBUG nova.network.neutron [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.962687] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 739.066067] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833416, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.097370] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142859} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.101302] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.101302] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.101302] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.116137] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Successfully created port: 906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.143581] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833419, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.161088] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e7dc6e-5332-65ab-60a2-ffa3b03bd111, 'name': SearchDatastore_Task, 'duration_secs': 0.008812} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.165390] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a9c73f9-70ff-4bf5-ae04-7df9a2e33240 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.172374] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 739.172374] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525badb7-fcbb-18d4-8cc2-bdf6b0294b61" [ 739.172374] env[68638]: _type = "Task" [ 739.172374] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.185573] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525badb7-fcbb-18d4-8cc2-bdf6b0294b61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.434279] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9a4e8e0-5077-489c-aa3c-daa1b4ded4c4 req-a0715fae-d3a7-48f3-a708-d892ec03b04b service nova] Releasing lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.434279] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.434279] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 739.569751] env[68638]: DEBUG oslo_vmware.api [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833416, 'name': PowerOnVM_Task, 'duration_secs': 0.659454} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.570043] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.570249] env[68638]: INFO nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Took 8.08 seconds to spawn the instance on the hypervisor. [ 739.570567] env[68638]: DEBUG nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.571291] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a6f97a-da08-4963-bf6a-fbf1a27af542 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.644551] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59114} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.647955] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.648766] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.649598] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93545011-3c01-4103-b8c1-7f8a017102fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.658168] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 739.658168] env[68638]: value = "task-2833420" [ 739.658168] env[68638]: _type = "Task" [ 739.658168] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.672308] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.683632] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525badb7-fcbb-18d4-8cc2-bdf6b0294b61, 'name': SearchDatastore_Task, 'duration_secs': 0.038418} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.683632] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.683632] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/5294e1b6-f34f-4f91-aa3e-e0276ad982ee.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.683632] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91f7a33d-8ef6-4c81-94c9-2ef034a53c3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.696390] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 739.696390] env[68638]: value = "task-2833421" [ 739.696390] env[68638]: _type = "Task" [ 739.696390] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.705873] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833421, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.739079] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21d1a49-c01f-49b4-9e62-ed9cfefb8404 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.746949] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88300243-2de3-4b42-9f39-3b132195a90a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.783971] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a6a9b2-b280-4a67-b02a-f5af378bfbc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.791716] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6e3f56-1e47-4b19-960e-a0c95081a9fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.811343] env[68638]: DEBUG nova.compute.provider_tree [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 739.989246] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 740.024227] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.031553] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.031795] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.031952] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.032147] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.032294] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.032442] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.032666] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.032849] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 740.033034] env[68638]: DEBUG nova.virt.hardware [None 
req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.033201] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.033375] env[68638]: DEBUG nova.virt.hardware [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.034341] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270e22a4-452a-4fac-b66c-0c8ad1658b23 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.043337] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd322d9-3c6d-472e-8231-e772ef4e40a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.103876] env[68638]: INFO nova.compute.manager [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Took 43.35 seconds to build instance. 
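The nova.virt.hardware lines above walk Nova's CPU-topology selection for the m1.nano flavor: with one vCPU and no flavor or image topology limits, the only sockets*cores*threads factorization that covers the vCPU count is 1:1:1, which is why exactly one possible topology is reported. A minimal stand-alone sketch of that enumeration follows (an illustration only, not Nova's actual nova/virt/hardware.py code; the helper name is made up):

# Sketch of the factorization step behind "Build topologies for 1 vcpu(s) 1:1:1"
# and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every sockets*cores*threads combination that exactly covers vcpus,
    # bounded by the flavor/image limits (here the 65536 defaults seen in the log).
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

For a 4-vCPU flavor the same enumeration would also yield (1,4,1), (2,2,1), (4,1,1) and similar splits, which Nova then sorts against the preferred topology before choosing one.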
[ 740.150404] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.151094] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.151094] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.151094] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.151210] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.151356] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.151568] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.151741] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 740.151948] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 
tempest-ServersListShow296Test-1262795635-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.152414] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.152886] env[68638]: DEBUG nova.virt.hardware [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.153659] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6d23ee-2fe3-4b50-8f19-efbf53680687 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.166406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfb0b52-ba14-4f3f-9099-d48bb0b686f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.173731] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069013} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.174449] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.175323] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df5222e-a39c-446d-a972-3c0a6a8162eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.186017] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.191848] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.195224] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.195521] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46e4f2cd-b7ee-4cd2-9b54-ff6460fa0a53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.226080] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.229927] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55cdd30b-5404-4aae-91c0-935cbaa5b4af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.246756] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.246756] env[68638]: value = "task-2833422" [ 740.246756] env[68638]: _type = "Task" [ 740.246756] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.253918] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833421, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464286} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.255373] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/5294e1b6-f34f-4f91-aa3e-e0276ad982ee.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.255728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.255978] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 740.255978] env[68638]: value = "task-2833423" [ 740.255978] env[68638]: _type = "Task" [ 740.255978] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.256248] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4dc7622-fa92-4b5c-b668-6045d50590b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.263844] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833422, 'name': CreateVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.270964] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833423, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.273078] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 740.273078] env[68638]: value = "task-2833424" [ 740.273078] env[68638]: _type = "Task" [ 740.273078] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.283798] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.343470] env[68638]: ERROR nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [req-a9757228-1af2-47b8-bbad-5c6fa241a90d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a9757228-1af2-47b8-bbad-5c6fa241a90d"}]} [ 740.367976] env[68638]: DEBUG nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 740.385243] env[68638]: DEBUG nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 740.385487] env[68638]: DEBUG nova.compute.provider_tree [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 740.405636] env[68638]: DEBUG nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 740.410107] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Updating instance_info_cache with network_info: [{"id": "ababd3bc-a199-4001-b965-7ba88550ead8", "address": "fa:16:3e:c5:ab:75", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapababd3bc-a1", "ovs_interfaceid": "ababd3bc-a199-4001-b965-7ba88550ead8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.430519] env[68638]: DEBUG nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 740.607621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d362da14-9e0d-4d87-b17f-7ceb5e357ca7 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.916s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.772387] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833422, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.782628] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833423, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.790986] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195894} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.790986] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.792110] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e667c2e-15ec-4b77-bd68-f1a4f66df9e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.817194] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/5294e1b6-f34f-4f91-aa3e-e0276ad982ee.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.820239] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5749f51-2563-44d4-a8ab-8fddddad28ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.844174] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 740.844174] env[68638]: value = "task-2833425" [ 740.844174] env[68638]: _type = "Task" [ 740.844174] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.851453] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833425, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.914198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "refresh_cache-8992f062-c28f-4ac8-8d0d-0c51c3784e88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.916017] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance network_info: |[{"id": "ababd3bc-a199-4001-b965-7ba88550ead8", "address": "fa:16:3e:c5:ab:75", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapababd3bc-a1", "ovs_interfaceid": "ababd3bc-a199-4001-b965-7ba88550ead8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 740.916017] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:ab:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ababd3bc-a199-4001-b965-7ba88550ead8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.925989] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.926264] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.927185] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c41b3046-aecc-486a-9034-9151552d16de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.950669] env[68638]: DEBUG nova.compute.manager [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Received event network-vif-plugged-906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 740.950964] env[68638]: DEBUG oslo_concurrency.lockutils [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] Acquiring lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.951238] env[68638]: DEBUG oslo_concurrency.lockutils [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.951436] env[68638]: DEBUG oslo_concurrency.lockutils [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.951619] env[68638]: DEBUG nova.compute.manager [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] No waiting events found dispatching network-vif-plugged-906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 740.951782] env[68638]: WARNING nova.compute.manager [req-86cf6841-f283-4a00-ab02-695744eb25df req-e11f8164-7559-4408-a6d8-da8606496605 service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Received unexpected event network-vif-plugged-906771db-1cab-44ee-b119-40a19f3597df for instance with vm_state building and task_state spawning. [ 740.959828] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.959828] env[68638]: value = "task-2833426" [ 740.959828] env[68638]: _type = "Task" [ 740.959828] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.977479] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833426, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.104289] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Successfully updated port: 906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.117914] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.272172] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833422, 'name': CreateVM_Task, 'duration_secs': 0.639761} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.275225] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.275600] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833423, 'name': ReconfigVM_Task, 'duration_secs': 0.747199} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.277715] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.277715] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.277715] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.277715] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Reconfigured VM instance instance-00000027 to attach disk [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7/b9736ec5-6332-4202-95d6-a3cd1d1f11d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.278575] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6c54f7df-d35e-4437-bf5f-3434310088d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.281095] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da54ea97-995e-4622-9017-acf7a6e4974d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.282772] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82acad43-63aa-4e2d-89b1-d157a8d3446b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.289617] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 741.289617] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a324f4-a8e6-707f-9970-95130a87acbd" [ 741.289617] env[68638]: _type = "Task" [ 741.289617] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.294597] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b521a38f-a8e4-4f8b-bfc3-8450b348b214 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.297986] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 741.297986] env[68638]: value = "task-2833427" [ 741.297986] env[68638]: _type = "Task" [ 741.297986] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.332366] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a324f4-a8e6-707f-9970-95130a87acbd, 'name': SearchDatastore_Task, 'duration_secs': 0.011136} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.333583] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694d1be1-3afa-43ec-8405-0705b0a7733c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.337580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.337580] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.337580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.337580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.337770] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.340956] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dce8acc-6a02-4c6b-a845-0751698e7107 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.342825] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833427, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.350302] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21ead22-2adb-4789-9711-ff3f8e782622 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.357351] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833425, 'name': ReconfigVM_Task, 'duration_secs': 0.294515} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.357974] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/5294e1b6-f34f-4f91-aa3e-e0276ad982ee.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.358622] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f97c2a1-c449-4850-98f4-e82289c1c8e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.368218] env[68638]: DEBUG nova.compute.provider_tree [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.371806] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.372037] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.372820] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8712d84d-ceb3-41ed-9d12-403335470929 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.378030] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 741.378030] env[68638]: value = "task-2833431" [ 741.378030] env[68638]: _type = "Task" [ 741.378030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.379997] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 741.379997] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52450b17-fe7f-a14d-8785-ca902b930a3e" [ 741.379997] env[68638]: _type = "Task" [ 741.379997] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.391178] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833431, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.394665] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52450b17-fe7f-a14d-8785-ca902b930a3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.467649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6f81ae-79b2-4c10-b7d9-2ce21414553e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.475681] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833426, 'name': CreateVM_Task, 'duration_secs': 0.422593} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.477269] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.477554] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Suspending the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 741.478280] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.478412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.478705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.478930] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ec06b811-bd0f-41d3-9209-0c0dead285c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.480444] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1f8e52a-6aef-4fa4-8e83-8a62bbfb4bc9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.485682] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 741.485682] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a5d43d-2570-7ad1-a8f4-9fbd1d2c0341" [ 741.485682] env[68638]: _type = "Task" [ 741.485682] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.486603] env[68638]: DEBUG oslo_vmware.api [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] Waiting for the task: (returnval){ [ 741.486603] env[68638]: value = "task-2833432" [ 741.486603] env[68638]: _type = "Task" [ 741.486603] env[68638]: } to complete. 
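The "Waiting for the task … progress is N%" entries throughout this log come from oslo.vmware's task polling: a vCenter method that returns a Task is invoked through the session, and `wait_for_task` polls it until it reaches success or error. A minimal sketch, assuming placeholder credentials and a placeholder VM moref (the keyword names follow `oslo_vmware.api.VMwareAPISession` as understood here, not configuration from this deployment):

```python
from oslo_vmware import api, vim_util

# Placeholder credentials and moref -- assumptions for the sketch, not values
# taken from this deployment.
session = api.VMwareAPISession(
    host='vcenter.example.org',
    server_username='administrator@vsphere.local',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,   # seconds between the "progress is N%" polls
)
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Invoking a *_Task method returns a task moref immediately ...
task = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName='renamed-vm')

# ... and wait_for_task polls it (producing the "Task: {...} progress is N%"
# debug lines) until it reaches 'success', raising if it ends in 'error'.
task_info = session.wait_for_task(task)
print(task_info.state)
```

On success the completed task's runtime is what shows up in the `'duration_secs'` fields above; on error the raised exception is what turns a failed ReconfigVM_Task or CopyVirtualDisk_Task into a build failure.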
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.497953] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a5d43d-2570-7ad1-a8f4-9fbd1d2c0341, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.502021] env[68638]: DEBUG oslo_vmware.api [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] Task: {'id': task-2833432, 'name': SuspendVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.558455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.558742] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.558994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.559212] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.559391] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.561770] env[68638]: INFO nova.compute.manager [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Terminating 
instance [ 741.607302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.607302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.607302] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.645954] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.809973] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833427, 'name': Rename_Task, 'duration_secs': 0.215021} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.810331] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.810706] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6856723-2577-4ce8-83fb-c99348818e93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.817043] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 741.817043] env[68638]: value = "task-2833433" [ 741.817043] env[68638]: _type = "Task" [ 741.817043] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.825654] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833433, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.897343] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833431, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.897659] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52450b17-fe7f-a14d-8785-ca902b930a3e, 'name': SearchDatastore_Task, 'duration_secs': 0.018385} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.898393] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73adeb10-2d28-4c99-bb0a-e503f42904f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.905118] env[68638]: DEBUG nova.scheduler.client.report [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 69 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 741.905118] env[68638]: DEBUG nova.compute.provider_tree [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 69 to 70 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 741.905118] env[68638]: DEBUG nova.compute.provider_tree [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.907888] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 741.907888] env[68638]: value = 
"session[5267461d-1849-2a3b-78fe-5543790e1404]52976458-34ae-b096-d366-d9859e128ce3" [ 741.907888] env[68638]: _type = "Task" [ 741.907888] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.918321] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52976458-34ae-b096-d366-d9859e128ce3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.008141] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a5d43d-2570-7ad1-a8f4-9fbd1d2c0341, 'name': SearchDatastore_Task, 'duration_secs': 0.015644} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.015193] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.015396] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.015644] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.016060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.016287] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.016604] env[68638]: DEBUG oslo_vmware.api [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] Task: {'id': task-2833432, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.016868] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cc01a9d-1ed7-4f8c-82a2-c070312f8e94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.026368] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.028667] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 742.028667] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a325c3c4-2af3-44e0-b651-c9382d3a9325 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.033922] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 742.033922] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c237f3-ca26-09ad-b902-659a2e3533c6" [ 742.033922] env[68638]: _type = "Task" [ 742.033922] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.042597] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c237f3-ca26-09ad-b902-659a2e3533c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.066017] env[68638]: DEBUG nova.compute.manager [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 742.066258] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.067360] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f129ff-ae3f-44c3-8130-d44280fe9374 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.074694] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.074943] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b53adb6f-88c2-4e8c-adc2-bcad8518950f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.081190] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 742.081190] env[68638]: value = "task-2833434" [ 742.081190] env[68638]: _type = "Task" [ 742.081190] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.089993] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.149272] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.315882] env[68638]: DEBUG nova.network.neutron [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Updating instance_info_cache with network_info: [{"id": "906771db-1cab-44ee-b119-40a19f3597df", "address": "fa:16:3e:b6:38:e0", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906771db-1c", "ovs_interfaceid": "906771db-1cab-44ee-b119-40a19f3597df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.327568] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833433, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.391757] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833431, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.410368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.463s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.410946] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Start building networks asynchronously for instance. 
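The instance_info_cache entry above is the serialized network_info list Nova keeps per instance; each element describes one VIF and its network. A trimmed-down copy of that structure and the handful of fields the rest of this log actually uses (plain dict access for illustration; Nova wraps this in its NetworkInfo/VIF model objects):

```python
# One VIF entry, abridged from the instance_info_cache update above.
vif = {
    "id": "906771db-1cab-44ee-b119-40a19f3597df",
    "address": "fa:16:3e:b6:38:e0",
    "type": "ovs",
    "devname": "tap906771db-1c",
    "details": {"nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887",
                "segmentation_id": 964},
    "network": {
        "id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29",
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "gateway": {"address": "192.168.128.1"},
                     "ips": [{"address": "192.168.128.4"}]}],
    },
}

def summarize_vif(vif):
    """Flatten the fields used by the VIF info built a few entries below
    (iface_id, mac_address, the NSX logical switch)."""
    return {
        "iface_id": vif["id"],
        "mac_address": vif["address"],
        "logical_switch": vif["details"]["nsx-logical-switch-id"],
        "fixed_ips": [ip["address"]
                      for subnet in vif["network"]["subnets"]
                      for ip in subnet["ips"]],
    }

print(summarize_vif(vif))
```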
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 742.416123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.026s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.416344] env[68638]: DEBUG nova.objects.instance [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 742.438300] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52976458-34ae-b096-d366-d9859e128ce3, 'name': SearchDatastore_Task, 'duration_secs': 0.019159} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.438825] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.439670] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.440083] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-926342f2-6426-42db-96f3-7a509efabd40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.451022] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 742.451022] env[68638]: value = "task-2833435" [ 742.451022] env[68638]: _type = "Task" [ 742.451022] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.455798] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833435, 'name': CopyVirtualDisk_Task} progress is 0%. 
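The CopyVirtualDisk_Task above, together with the ExtendVirtualDisk_Task that follows a few entries later, copies the cached image VMDK into the instance directory and grows the root disk. A sketch of issuing the same two vCenter tasks directly through an oslo.vmware session, with placeholder credentials and a placeholder datacenter moref; the datastore paths are the ones logged above:

```python
from oslo_vmware import api, vim_util

# Placeholder session and datacenter -- assumptions, not this deployment's config.
session = api.VMwareAPISession(host='vcenter.example.org',
                               server_username='administrator@vsphere.local',
                               server_password='secret',
                               api_retry_count=10, task_poll_interval=0.5)
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')
disk_mgr = session.vim.service_content.virtualDiskManager

src = ("[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/"
       "ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk")
dst = ("[datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f/"
       "06a1a44f-35ee-45d2-9503-23468150b72f.vmdk")

# Copy the cached image into the instance folder, then extend the root disk
# to 1048576 KB -- the size the driver logs for this instance further on.
copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                               sourceName=src, sourceDatacenter=dc_ref,
                               destName=dst, destDatacenter=dc_ref)
session.wait_for_task(copy_task)

extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                 name=dst, datacenter=dc_ref,
                                 newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(extend_task)
```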
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.507527] env[68638]: DEBUG oslo_vmware.api [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] Task: {'id': task-2833432, 'name': SuspendVM_Task, 'duration_secs': 0.688634} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.507820] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Suspended the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 742.507993] env[68638]: DEBUG nova.compute.manager [None req-112b16b5-4c4d-4241-b21a-f41c3853b584 tempest-ServersAdminNegativeTestJSON-452856394 tempest-ServersAdminNegativeTestJSON-452856394-project-admin] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.508974] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb07e3ba-763b-492f-9612-0eb7ed5b8afd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.544641] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c237f3-ca26-09ad-b902-659a2e3533c6, 'name': SearchDatastore_Task, 'duration_secs': 0.007976} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.545436] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbd3fbfa-261a-4090-8a43-743a7eb63a97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.551135] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 742.551135] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d76112-1b6e-01bb-579f-f7eb68658955" [ 742.551135] env[68638]: _type = "Task" [ 742.551135] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.559548] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d76112-1b6e-01bb-579f-f7eb68658955, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.591556] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833434, 'name': PowerOffVM_Task, 'duration_secs': 0.159919} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.592282] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 742.592282] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.592389] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9b3c0f7-6a66-4163-9d7e-bee71f470cec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.818916] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.819233] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Instance network_info: |[{"id": "906771db-1cab-44ee-b119-40a19f3597df", "address": "fa:16:3e:b6:38:e0", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906771db-1c", "ovs_interfaceid": "906771db-1cab-44ee-b119-40a19f3597df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 742.819808] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:38:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3836fb52-19c6-4e10-a0ca-f0bca73dc887', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'906771db-1cab-44ee-b119-40a19f3597df', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 742.827847] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 742.831972] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 742.832258] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f2ac74e-b3a4-44f5-b144-5ca5cdea7100 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.853818] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833433, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.855340] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 742.855340] env[68638]: value = "task-2833437" [ 742.855340] env[68638]: _type = "Task" [ 742.855340] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.863300] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833437, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.891563] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833431, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.936120] env[68638]: DEBUG nova.compute.utils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.937665] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.937742] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.960135] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833435, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.993364] env[68638]: DEBUG nova.policy [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdfdcc628e4e40b586b0b71bc0ed5b19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d2c1dcc55dd42c5b791dd8f1841479b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 743.022250] env[68638]: DEBUG nova.compute.manager [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Received event network-changed-906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 743.022250] env[68638]: DEBUG nova.compute.manager [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Refreshing instance network info cache due to event network-changed-906771db-1cab-44ee-b119-40a19f3597df. 
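The nova.policy entry above is an oslo.policy check: the rule `network:attach_external_network` is evaluated against the request's credential dict and fails for a plain project member. A minimal reproduction with oslo.policy itself (the `role:admin` default here is a placeholder; Nova registers its own defaults for this rule):

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin',
    description='Placeholder default for this sketch; Nova registers its own.'))

# Credentials abridged from the failed check logged above: a plain project member.
creds = {'user_id': 'fdfdcc628e4e40b586b0b71bc0ed5b19',
         'project_id': '2d2c1dcc55dd42c5b791dd8f1841479b',
         'roles': ['member', 'reader'],
         'is_admin': False}

print(enforcer.enforce('network:attach_external_network', {}, creds))
# False -- matching the DEBUG "Policy check ... failed" entry above.
```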
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 743.022250] env[68638]: DEBUG oslo_concurrency.lockutils [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] Acquiring lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.022510] env[68638]: DEBUG oslo_concurrency.lockutils [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] Acquired lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.022758] env[68638]: DEBUG nova.network.neutron [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Refreshing network info cache for port 906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 743.065513] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.065890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.066517] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d76112-1b6e-01bb-579f-f7eb68658955, 'name': SearchDatastore_Task, 'duration_secs': 0.009099} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.066699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.067167] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8992f062-c28f-4ac8-8d0d-0c51c3784e88/8992f062-c28f-4ac8-8d0d-0c51c3784e88.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.067485] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41605aad-f611-47b7-a4a9-003084361c67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.074506] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 743.074506] env[68638]: value = "task-2833438" [ 743.074506] env[68638]: _type = "Task" [ 743.074506] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.086926] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.310125] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 743.310555] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 743.310555] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Deleting the datastore file [datastore2] ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 743.310825] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fbaa630-88cb-4f8c-9a47-4f4cc9418c87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.325460] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for the task: (returnval){ [ 743.325460] env[68638]: value = "task-2833439" [ 743.325460] env[68638]: _type = "Task" [ 743.325460] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.332975] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833433, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.335976] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.350276] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Successfully created port: 53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.366276] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833437, 'name': CreateVM_Task} progress is 15%. 
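"Successfully created port" above is Nova's Neutron API layer creating the instance's port before it is bound to this compute host. For orientation only, a roughly equivalent call through openstacksdk, which is not what Nova uses internally (it drives its own Neutron client wrapper); the cloud name is a placeholder and the network ID reuses the network from the earlier cache entry purely as an example value:

```python
import openstack

# Placeholder cloud entry from clouds.yaml -- an assumption for illustration.
conn = openstack.connect(cloud='devstack')

port = conn.network.create_port(
    network_id='08d42292-b840-4bc6-bed8-5ccd8a3a2a29',  # example network ID
    device_owner='compute:nova',
)
print(port.id)  # Nova records the resulting port ID, as with 53351f41-... above
```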
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.397101] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833431, 'name': Rename_Task, 'duration_secs': 1.942526} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.397454] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.397671] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dc0f131-6366-44b1-b728-f79a7b4bf1c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.404740] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 743.404740] env[68638]: value = "task-2833440" [ 743.404740] env[68638]: _type = "Task" [ 743.404740] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.413725] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833440, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.441792] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.446381] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a6855806-c75f-40e7-b66d-bc47ee4654ee tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.030s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.447727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.527s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.449576] env[68638]: INFO nova.compute.claims [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.472144] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516566} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.472144] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.472480] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.472931] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-685047f9-82fc-4d7f-b246-83f7873ac009 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.482725] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 743.482725] env[68638]: value = "task-2833441" [ 743.482725] env[68638]: _type = "Task" [ 743.482725] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.497047] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.591143] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833438, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.837254] env[68638]: DEBUG oslo_vmware.api [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833433, 'name': PowerOnVM_Task, 'duration_secs': 1.518513} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.840496] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.840779] env[68638]: DEBUG nova.compute.manager [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.841139] env[68638]: DEBUG oslo_vmware.api [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Task: {'id': task-2833439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1931} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.841882] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024439d8-0f64-493a-b34b-aad4650af74c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.846097] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.846097] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.846097] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.846097] env[68638]: INFO nova.compute.manager [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Took 1.78 seconds to destroy the instance on the hypervisor. [ 743.846097] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.846097] env[68638]: DEBUG nova.compute.manager [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 743.846097] env[68638]: DEBUG nova.network.neutron [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.873425] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833437, 'name': CreateVM_Task, 'duration_secs': 0.935178} completed successfully. 
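The terminate path for instance ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac above follows a fixed order: power the VM off, unregister it from vCenter, delete its datastore directory, then deallocate the Neutron resources. A compressed sketch of that ordering against an oslo.vmware session (everything except the vCenter method names is a hypothetical stand-in, not Nova's actual signatures):

```python
def destroy_instance(session, vm_ref, instance_dir_path, deallocate_network):
    """Ordering mirrored from the entries above; arguments are illustrative."""
    # 1. Power the VM off and wait for the task (PowerOffVM_Task).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # 2. Unregister the VM from vCenter (UnregisterVM is a plain call, no task).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 3. Delete the instance directory from the datastore.
    file_mgr = session.vim.service_content.fileManager
    session.wait_for_task(session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name=instance_dir_path))
    # 4. Only then release the Neutron resources for the instance.
    deallocate_network()
```

Deallocating the network only after the hypervisor-side cleanup matches the ordering of the "Took 1.78 seconds to destroy the instance on the hypervisor" and "Deallocating network for instance" entries above.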
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.873602] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 743.874278] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.874467] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.874798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 743.875126] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2caef02f-d7ca-413c-bd72-4193dd6d080f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.882581] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 743.882581] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d4ed25-6998-1a6f-5c7d-0e204b0e06c4" [ 743.882581] env[68638]: _type = "Task" [ 743.882581] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.892935] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d4ed25-6998-1a6f-5c7d-0e204b0e06c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.910744] env[68638]: DEBUG nova.network.neutron [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Updated VIF entry in instance network info cache for port 906771db-1cab-44ee-b119-40a19f3597df. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 743.910990] env[68638]: DEBUG nova.network.neutron [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Updating instance_info_cache with network_info: [{"id": "906771db-1cab-44ee-b119-40a19f3597df", "address": "fa:16:3e:b6:38:e0", "network": {"id": "08d42292-b840-4bc6-bed8-5ccd8a3a2a29", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1723671578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dce3b1af81bc4cec877ef5a7e6999a7f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3836fb52-19c6-4e10-a0ca-f0bca73dc887", "external-id": "nsx-vlan-transportzone-964", "segmentation_id": 964, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap906771db-1c", "ovs_interfaceid": "906771db-1cab-44ee-b119-40a19f3597df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.918401] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833440, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.994139] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081445} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.994596] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.995550] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bab36b3-a3d5-4314-a3a5-b75a0ba36f68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.019081] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.019848] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4473881-91ff-4f6f-a7b9-bcf38f9605ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.046491] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 744.046491] env[68638]: value = "task-2833442" [ 744.046491] env[68638]: _type = "Task" [ 744.046491] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.066284] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.094919] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833438, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.375084] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.396190] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d4ed25-6998-1a6f-5c7d-0e204b0e06c4, 'name': SearchDatastore_Task, 'duration_secs': 0.020353} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.396705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.396950] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.397473] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.397673] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.397862] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.399106] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a2a1daa-0b79-40c4-98f8-770000793e8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.407015] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.407222] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.412221] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b0f592-70d1-4ac3-8276-b6141b65d670 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.419811] env[68638]: DEBUG oslo_concurrency.lockutils [req-17fa0ed5-c5ca-434f-a6c4-0cdaed89e724 req-546b22bb-ae71-44a3-a1df-930ea5820c0a service nova] Releasing lock "refresh_cache-94a33fcd-69b6-443b-9c86-5129e30b5b0d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.420215] env[68638]: DEBUG oslo_vmware.api [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833440, 'name': PowerOnVM_Task, 'duration_secs': 0.764801} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.421523] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.421694] env[68638]: INFO nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Took 10.15 seconds to spawn the instance on the hypervisor. [ 744.421907] env[68638]: DEBUG nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.422260] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 744.422260] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529089aa-26ce-a1ea-6b5a-243f911ec3be" [ 744.422260] env[68638]: _type = "Task" [ 744.422260] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.423025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6481a7b0-5435-407f-a721-c5a1ec3b9318 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.443790] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529089aa-26ce-a1ea-6b5a-243f911ec3be, 'name': SearchDatastore_Task, 'duration_secs': 0.009172} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.444868] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee7a0260-1a8c-4287-8495-c0b9903f1c81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.450438] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 744.450438] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b5148b-7a01-f52a-aacf-720a156f15e7" [ 744.450438] env[68638]: _type = "Task" [ 744.450438] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.458600] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b5148b-7a01-f52a-aacf-720a156f15e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.462087] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None 
req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.492188] env[68638]: DEBUG nova.virt.hardware [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.493324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906ab1b6-213c-4919-a557-2636bf294004 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.506404] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee217766-e0dd-4b34-96d3-de3940ca2a21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.560582] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 
tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833442, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.594769] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833438, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.737830] env[68638]: DEBUG nova.compute.manager [req-04ef7735-8eb6-4eca-b17d-73ca41819c43 req-dbd85ec8-0866-4ee2-9ecb-68865aa43cde service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Received event network-vif-deleted-93a70134-a907-41d4-bce1-2bf7496b23bb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 744.737830] env[68638]: INFO nova.compute.manager [req-04ef7735-8eb6-4eca-b17d-73ca41819c43 req-dbd85ec8-0866-4ee2-9ecb-68865aa43cde service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Neutron deleted interface 93a70134-a907-41d4-bce1-2bf7496b23bb; detaching it from the instance and deleting it from the info cache [ 744.737830] env[68638]: DEBUG nova.network.neutron [req-04ef7735-8eb6-4eca-b17d-73ca41819c43 req-dbd85ec8-0866-4ee2-9ecb-68865aa43cde service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.965931] env[68638]: INFO nova.compute.manager [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Took 47.76 seconds to build instance. [ 744.974341] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b5148b-7a01-f52a-aacf-720a156f15e7, 'name': SearchDatastore_Task, 'duration_secs': 0.00911} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.977585] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.977585] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 94a33fcd-69b6-443b-9c86-5129e30b5b0d/94a33fcd-69b6-443b-9c86-5129e30b5b0d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 744.978665] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de41e999-0526-470a-be51-c150448dd7b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.986966] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 744.986966] env[68638]: value = "task-2833444" [ 744.986966] env[68638]: _type = "Task" [ 744.986966] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.999008] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.037886] env[68638]: DEBUG nova.network.neutron [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.060291] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833442, 'name': ReconfigVM_Task, 'duration_secs': 0.788202} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.061026] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f/06a1a44f-35ee-45d2-9503-23468150b72f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.064868] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e27e2f34-e11b-452e-9158-429d38236de2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.072033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.072033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.072033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.072033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.073102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.076600] env[68638]: INFO nova.compute.manager [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Terminating instance [ 745.081860] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 
tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 745.081860] env[68638]: value = "task-2833445" [ 745.081860] env[68638]: _type = "Task" [ 745.081860] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.104995] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833445, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.108801] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833438, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.85982} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.109601] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 8992f062-c28f-4ac8-8d0d-0c51c3784e88/8992f062-c28f-4ac8-8d0d-0c51c3784e88.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 745.109601] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 745.109874] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7da39641-2834-4a23-a3a8-b1a9063a56a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.118199] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 745.118199] env[68638]: value = "task-2833446" [ 745.118199] env[68638]: _type = "Task" [ 745.118199] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.126958] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833446, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.161408] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09ebf5c-9474-49dd-8707-d287bc493ade {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.170765] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77451401-8bee-41ca-bf31-48eb551f7933 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.210948] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d8aa99-6b4d-4241-8247-121f92695ebd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.223307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7446011-f811-409f-b49d-760ce2424064 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.240428] env[68638]: DEBUG nova.compute.provider_tree [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.241994] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf3640dd-c678-4155-81fd-b06479cd04bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.252172] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04feaf3a-b408-4e12-ab79-b8f443d93179 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.292421] env[68638]: DEBUG nova.compute.manager [req-04ef7735-8eb6-4eca-b17d-73ca41819c43 req-dbd85ec8-0866-4ee2-9ecb-68865aa43cde service nova] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Detach interface failed, port_id=93a70134-a907-41d4-bce1-2bf7496b23bb, reason: Instance ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 745.331373] env[68638]: DEBUG nova.compute.manager [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Received event network-vif-plugged-53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 745.331749] env[68638]: DEBUG oslo_concurrency.lockutils [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] Acquiring lock "2450602a-fde7-4a65-b7a2-be4195077758-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.332017] env[68638]: DEBUG oslo_concurrency.lockutils [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] Lock "2450602a-fde7-4a65-b7a2-be4195077758-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.332274] env[68638]: DEBUG oslo_concurrency.lockutils [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] Lock "2450602a-fde7-4a65-b7a2-be4195077758-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.332628] env[68638]: DEBUG nova.compute.manager [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] No waiting events found dispatching network-vif-plugged-53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.332767] env[68638]: WARNING nova.compute.manager [req-1d36def5-d5ed-4171-a1e0-1b68db7e8ad6 req-924f5258-dbf1-4884-80e2-adabb64742b7 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Received unexpected event network-vif-plugged-53351f41-4f72-4547-8bc4-8949546128c2 for instance with vm_state building and task_state spawning. 
[ 745.406446] env[68638]: INFO nova.compute.manager [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Rescuing [ 745.406730] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.406874] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.407053] env[68638]: DEBUG nova.network.neutron [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.417745] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Successfully updated port: 53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.469622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b68b782-b17f-44b8-b31e-eccb1eac070f tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.731s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.502255] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833444, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.543592] env[68638]: INFO nova.compute.manager [-] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Took 1.70 seconds to deallocate network for instance. 
[ 745.583857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "refresh_cache-b9736ec5-6332-4202-95d6-a3cd1d1f11d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.583967] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "refresh_cache-b9736ec5-6332-4202-95d6-a3cd1d1f11d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.584155] env[68638]: DEBUG nova.network.neutron [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.604791] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833445, 'name': Rename_Task, 'duration_secs': 0.258137} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.607177] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.611698] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b4911a3-986f-4e65-af49-0659ea16389d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.627402] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 745.627402] env[68638]: value = "task-2833447" [ 745.627402] env[68638]: _type = "Task" [ 745.627402] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.639535] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095349} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.641301] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.641301] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f0e60c-7a61-4c82-8a4d-a65b4b5cfd8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.649558] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833447, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.678503] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 8992f062-c28f-4ac8-8d0d-0c51c3784e88/8992f062-c28f-4ac8-8d0d-0c51c3784e88.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.679450] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e587f6b-89c0-4107-9936-826dab3f792a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.708943] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 745.708943] env[68638]: value = "task-2833448" [ 745.708943] env[68638]: _type = "Task" [ 745.708943] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.728808] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833448, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.749857] env[68638]: DEBUG nova.scheduler.client.report [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.923898] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.923898] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.923898] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.975087] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.003679] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.859338} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.003679] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 94a33fcd-69b6-443b-9c86-5129e30b5b0d/94a33fcd-69b6-443b-9c86-5129e30b5b0d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 746.004113] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 746.004147] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b40eb17e-ba0f-457f-ac78-5bc9ed41d8bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.017984] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 746.017984] env[68638]: value = "task-2833449" [ 746.017984] env[68638]: _type = "Task" [ 746.017984] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.037363] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833449, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.054786] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.113467] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "072be237-c51e-43d2-ad84-46122ef9f335" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.113733] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.113935] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "072be237-c51e-43d2-ad84-46122ef9f335-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.114257] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.115296] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.120734] env[68638]: INFO nova.compute.manager [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Terminating instance [ 746.122911] env[68638]: DEBUG nova.network.neutron [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.141832] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833447, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.201223] env[68638]: DEBUG nova.network.neutron [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.203616] env[68638]: DEBUG nova.network.neutron [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.225289] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833448, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.257265] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.257710] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.263153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.726s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.265071] env[68638]: INFO nova.compute.claims [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.486621] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.512824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.533218] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078131} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.533218] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 746.533218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf845229-c6aa-4157-a507-c6a8768b1607 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.567134] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 94a33fcd-69b6-443b-9c86-5129e30b5b0d/94a33fcd-69b6-443b-9c86-5129e30b5b0d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.567134] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc694ee-5589-426f-be67-02497e380d53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.591772] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 746.591772] env[68638]: value = "task-2833451" [ 746.591772] env[68638]: _type = "Task" [ 746.591772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.601396] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.628500] env[68638]: DEBUG nova.compute.manager [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 746.629021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.629990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f230b4f-34e5-4d6f-86b2-b380ea65e94b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.642387] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 746.648261] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3562cb79-9f54-4571-918a-eaa5b8d1d5d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.650332] env[68638]: DEBUG oslo_vmware.api [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833447, 'name': PowerOnVM_Task, 'duration_secs': 0.999441} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.651199] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.651444] env[68638]: DEBUG nova.compute.manager [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.654623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d177e399-ab92-4b70-b939-987d58a5010c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.703705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.706979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "refresh_cache-b9736ec5-6332-4202-95d6-a3cd1d1f11d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.707810] env[68638]: DEBUG nova.compute.manager [None 
req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 746.708458] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.710107] env[68638]: DEBUG nova.network.neutron [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Updating instance_info_cache with network_info: [{"id": "53351f41-4f72-4547-8bc4-8949546128c2", "address": "fa:16:3e:82:29:af", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53351f41-4f", "ovs_interfaceid": "53351f41-4f72-4547-8bc4-8949546128c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.712264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43162e4-2b61-41bc-a882-4532d0b17f59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.728235] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833448, 'name': ReconfigVM_Task, 'duration_secs': 0.80034} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.734876] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 8992f062-c28f-4ac8-8d0d-0c51c3784e88/8992f062-c28f-4ac8-8d0d-0c51c3784e88.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 746.734876] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 746.737125] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ef5979c-b621-455c-8655-49554936d4d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.738964] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a3eec9b-d3d7-4c54-b6e2-0892b7459d48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.740879] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.741123] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.741310] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleting the datastore file [datastore2] 072be237-c51e-43d2-ad84-46122ef9f335 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.744579] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b281cf71-4272-44b3-bf0f-f85663ccca08 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.754182] env[68638]: DEBUG oslo_vmware.api [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 746.754182] env[68638]: value = "task-2833455" [ 746.754182] env[68638]: _type = "Task" [ 746.754182] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.757324] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 746.757324] env[68638]: value = "task-2833453" [ 746.757324] env[68638]: _type = "Task" [ 746.757324] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.757552] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 746.757552] env[68638]: value = "task-2833454" [ 746.757552] env[68638]: _type = "Task" [ 746.757552] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.772791] env[68638]: DEBUG nova.compute.utils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.785243] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 746.785437] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.788837] env[68638]: DEBUG oslo_vmware.api [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.793999] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833453, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.798135] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833454, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.873955] env[68638]: DEBUG nova.policy [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a83ef2026afc46ce9123fe500998e2b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '593ae658cd2c4766a09753c62f02c0c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.109580] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833451, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.157432] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.157432] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.173735] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.219310] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.219795] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Instance network_info: |[{"id": "53351f41-4f72-4547-8bc4-8949546128c2", "address": "fa:16:3e:82:29:af", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53351f41-4f", "ovs_interfaceid": "53351f41-4f72-4547-8bc4-8949546128c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.220879] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:29:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53351f41-4f72-4547-8bc4-8949546128c2', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.237151] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.237151] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.237151] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-173867c4-6741-4e38-843d-25bb6da4816a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.263927] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.263927] env[68638]: value = "task-2833456" [ 747.263927] env[68638]: _type = "Task" [ 747.263927] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.281687] env[68638]: DEBUG oslo_vmware.api [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280783} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.282501] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.290663] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.290663] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.290663] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.290663] env[68638]: INFO nova.compute.manager [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Took 0.66 seconds to destroy the instance on the hypervisor. [ 747.290910] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.299201] env[68638]: DEBUG nova.compute.manager [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 747.299317] env[68638]: DEBUG nova.network.neutron [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.302136] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833454, 'name': PowerOffVM_Task, 'duration_secs': 0.173901} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.302357] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833453, 'name': Rename_Task, 'duration_secs': 0.203664} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.307930] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 747.311020] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 747.311020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 747.311020] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833456, 'name': CreateVM_Task} progress is 15%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.311020] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-422f579d-282c-4b6b-b564-7cce0c9f27e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.312391] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8cf696fa-8e22-4bd1-89c2-95a6e0572813 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.322158] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 747.322158] env[68638]: value = "task-2833457" [ 747.322158] env[68638]: _type = "Task" [ 747.322158] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.337619] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833457, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.348420] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 747.348555] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 747.348813] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleting the datastore file [datastore2] b9736ec5-6332-4202-95d6-a3cd1d1f11d7 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 747.351735] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-526ed443-65a6-4801-974e-30c9b6c9dc0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.360141] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 747.360141] env[68638]: value = "task-2833459" [ 747.360141] env[68638]: _type = "Task" [ 747.360141] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.370168] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.427053] env[68638]: DEBUG nova.compute.manager [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Received event network-changed-53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 747.427155] env[68638]: DEBUG nova.compute.manager [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Refreshing instance network info cache due to event network-changed-53351f41-4f72-4547-8bc4-8949546128c2. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 747.427370] env[68638]: DEBUG oslo_concurrency.lockutils [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] Acquiring lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.427565] env[68638]: DEBUG oslo_concurrency.lockutils [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] Acquired lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.427879] env[68638]: DEBUG nova.network.neutron [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Refreshing network info cache for port 53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 747.519514] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Successfully created port: e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.605609] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833451, 'name': ReconfigVM_Task, 'duration_secs': 0.67577} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.608493] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 94a33fcd-69b6-443b-9c86-5129e30b5b0d/94a33fcd-69b6-443b-9c86-5129e30b5b0d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.610211] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-797dca28-f144-4cde-9877-305203b14e01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.618537] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 747.618537] env[68638]: value = "task-2833460" [ 747.618537] env[68638]: _type = "Task" [ 747.618537] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.630167] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833460, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.790582] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833456, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.833356] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833457, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.877384] env[68638]: DEBUG oslo_vmware.api [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199261} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.880929] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.881230] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.881471] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.881700] env[68638]: INFO nova.compute.manager [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Took 1.17 seconds to destroy the instance on the hypervisor. [ 747.882022] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.883418] env[68638]: DEBUG nova.compute.manager [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 747.883418] env[68638]: DEBUG nova.network.neutron [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.901534] env[68638]: DEBUG nova.network.neutron [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.058346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e1add0-3681-4346-a59a-bfef157ce5a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.066891] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16c7e87-5e8c-4224-b8ae-af200adab0c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.100903] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df4fb0d-6521-4894-a8eb-6b271f2f3cd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.109430] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266a2c4b-728f-4b0d-9f22-16cc4ecc81e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.127894] env[68638]: DEBUG nova.compute.provider_tree [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 748.139938] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833460, 'name': Rename_Task, 'duration_secs': 0.26016} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.139938] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.142268] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-514af408-aa4c-4841-aca0-6629fd1b080e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.153708] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 748.153708] env[68638]: value = "task-2833461" [ 748.153708] env[68638]: _type = "Task" [ 748.153708] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.164043] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.165532] env[68638]: DEBUG nova.network.neutron [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.260334] env[68638]: DEBUG nova.network.neutron [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Updated VIF entry in instance network info cache for port 53351f41-4f72-4547-8bc4-8949546128c2. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 748.261548] env[68638]: DEBUG nova.network.neutron [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Updating instance_info_cache with network_info: [{"id": "53351f41-4f72-4547-8bc4-8949546128c2", "address": "fa:16:3e:82:29:af", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53351f41-4f", "ovs_interfaceid": "53351f41-4f72-4547-8bc4-8949546128c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.263921] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.263921] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92ce5f4a-dc5a-4b4b-805b-2ebdf51de6ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.273332] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 748.273332] env[68638]: value = 
"task-2833462" [ 748.273332] env[68638]: _type = "Task" [ 748.273332] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.287096] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833456, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.290860] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.295154] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.328631] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.328902] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.329562] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.329829] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.330018] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.330180] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c 
tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.330397] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.330684] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.332150] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.332150] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.332150] env[68638]: DEBUG nova.virt.hardware [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.332150] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d01fa9f-438d-4265-a712-1821cb3b3b0d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.343218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1661c1-7d3e-491f-80a4-13f11d46400d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.347626] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833457, 'name': PowerOnVM_Task, 'duration_secs': 0.595434} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.349063] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 748.349063] env[68638]: INFO nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Took 11.23 seconds to spawn the instance on the hypervisor. [ 748.349063] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 748.349345] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889e0d25-bdf3-4d67-adf0-e9f1157950a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.404264] env[68638]: DEBUG nova.network.neutron [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.478419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "06a1a44f-35ee-45d2-9503-23468150b72f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.478749] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.479361] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "06a1a44f-35ee-45d2-9503-23468150b72f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.479819] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.480081] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.482882] env[68638]: INFO nova.compute.manager [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Terminating instance [ 748.651033] env[68638]: ERROR nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [req-c8bfc70c-662d-40b2-ba36-bb9a065e43c2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c8bfc70c-662d-40b2-ba36-bb9a065e43c2"}]} [ 748.663589] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833461, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.670379] env[68638]: DEBUG nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 748.672968] env[68638]: INFO nova.compute.manager [-] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Took 1.37 seconds to deallocate network for instance. 
[ 748.689582] env[68638]: DEBUG nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 748.690051] env[68638]: DEBUG nova.compute.provider_tree [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 748.703483] env[68638]: DEBUG nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 748.727915] env[68638]: DEBUG nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 748.765402] env[68638]: DEBUG oslo_concurrency.lockutils [req-b6e2d3bd-1ff1-4006-8a78-8079b459e518 req-dd449fa4-577d-4b8e-953f-93a20c4957b3 service nova] Releasing lock "refresh_cache-2450602a-fde7-4a65-b7a2-be4195077758" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.795531] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833456, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.795855] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833462, 'name': PowerOffVM_Task, 'duration_secs': 0.318514} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.798513] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.799514] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5722224-3bec-4f44-a62f-aa320f5f6955 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.822661] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f1d981-d16b-486e-9f03-19e9d0b0124f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.850881] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.851673] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34e3cbba-079a-4585-8ef5-cd658c775338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.861775] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 748.861775] env[68638]: value = "task-2833464" [ 748.861775] env[68638]: _type = "Task" [ 748.861775] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.877313] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 748.877523] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.877759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.877904] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.878092] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.879910] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c1d4da7-a8a4-4471-a7b5-9cc3af167822 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.881944] env[68638]: INFO nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Took 47.24 seconds to build instance. [ 748.892999] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.893202] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.894030] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbd07970-88dc-44bc-b858-d8d646a53338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.900587] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 748.900587] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5280dfd5-0930-78a3-c26f-ff32c8e3f670" [ 748.900587] env[68638]: _type = "Task" [ 748.900587] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.906865] env[68638]: INFO nova.compute.manager [-] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Took 1.02 seconds to deallocate network for instance. [ 748.915060] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5280dfd5-0930-78a3-c26f-ff32c8e3f670, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.987729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "refresh_cache-06a1a44f-35ee-45d2-9503-23468150b72f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.987958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquired lock "refresh_cache-06a1a44f-35ee-45d2-9503-23468150b72f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.988202] env[68638]: DEBUG nova.network.neutron [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.165441] env[68638]: DEBUG oslo_vmware.api [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833461, 'name': PowerOnVM_Task, 'duration_secs': 0.66102} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.171327] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.171583] env[68638]: INFO nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Took 9.18 seconds to spawn the instance on the hypervisor. [ 749.171771] env[68638]: DEBUG nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.172904] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d14b4-1b10-4d26-a005-4ec5a3b9428b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.180613] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.246507] env[68638]: DEBUG nova.compute.manager [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Received event network-vif-plugged-e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.246739] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] Acquiring lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.247193] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.247377] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.247547] env[68638]: DEBUG nova.compute.manager [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] [instance: 
a3b06e32-2670-4381-bb91-4597bfcabaa6] No waiting events found dispatching network-vif-plugged-e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.247708] env[68638]: WARNING nova.compute.manager [req-d0e77db7-0b4b-4989-9d64-9bdf166ff026 req-b2c28e0f-ebbf-414c-8d85-2ef98facf4a4 service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Received unexpected event network-vif-plugged-e84b052e-af7e-4eb2-8c0c-08e6c66a2707 for instance with vm_state building and task_state spawning. [ 749.295924] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833456, 'name': CreateVM_Task, 'duration_secs': 1.907421} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.296147] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.298764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.299072] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.299358] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.299675] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-983d8201-c575-45d3-87f9-278e5cb1febb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.312353] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 749.312353] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5222c94a-c5e4-7ae9-1650-6265a780da20" [ 749.312353] env[68638]: _type = "Task" [ 749.312353] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.324966] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5222c94a-c5e4-7ae9-1650-6265a780da20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.340078] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Successfully updated port: e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.384207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.047s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.401893] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f1a885-8d47-4b6c-9c83-2c180c3f5346 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.417584] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d728bf8f-6f1d-4590-9086-ea405c64698c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.421627] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.421906] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5280dfd5-0930-78a3-c26f-ff32c8e3f670, 'name': SearchDatastore_Task, 'duration_secs': 0.027135} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.423323] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e51a692b-298e-42dc-aaf4-464ac8e97fdb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.455033] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b196a4-0204-41ee-95f9-2ca7d5416c67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.457781] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 749.457781] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52300b6a-8ffb-e473-add8-88c3ad4552d5" [ 749.457781] env[68638]: _type = "Task" [ 749.457781] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.464630] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc4b6da-e448-4fa5-82fc-96b1d9bca936 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.472064] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52300b6a-8ffb-e473-add8-88c3ad4552d5, 'name': SearchDatastore_Task, 'duration_secs': 0.016591} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.472677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.473044] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. {{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 749.473308] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5190966-00ee-4bc2-b744-706923e111e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.483096] env[68638]: DEBUG nova.compute.provider_tree [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 749.494596] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 749.494596] env[68638]: value = "task-2833465" [ 749.494596] env[68638]: _type = "Task" [ 749.494596] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.506772] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.516835] env[68638]: DEBUG nova.network.neutron [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.575299] env[68638]: DEBUG nova.network.neutron [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.671196] env[68638]: DEBUG nova.compute.manager [req-dc7be161-4dee-49b7-8fbe-9398d97caddd req-454d595f-f07f-420e-b459-439897214515 service nova] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Received event network-vif-deleted-f312d14a-da0d-42ec-a0a3-2f652c818f26 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.694839] env[68638]: INFO nova.compute.manager [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Took 47.89 seconds to build instance. [ 749.835778] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5222c94a-c5e4-7ae9-1650-6265a780da20, 'name': SearchDatastore_Task, 'duration_secs': 0.015752} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.836214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.836520] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.836752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.836855] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.837044] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.837354] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e8fe446-311a-45ed-897b-3ebc0c4eb9d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.843562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.843562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquired lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.843562] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Building network info cache for instance {{(pid=68638) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.849626] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.849831] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.852549] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-601420a5-7b79-41f7-8ec7-e37b684f8d91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.859783] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 749.859783] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a05dc1-ce84-7d0f-9572-8002d1d9bc54" [ 749.859783] env[68638]: _type = "Task" [ 749.859783] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.874516] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a05dc1-ce84-7d0f-9572-8002d1d9bc54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.891881] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.006633] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833465, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.020823] env[68638]: DEBUG nova.scheduler.client.report [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 750.021200] env[68638]: DEBUG nova.compute.provider_tree [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 72 to 73 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 750.021394] env[68638]: DEBUG nova.compute.provider_tree [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 750.081366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Releasing lock "refresh_cache-06a1a44f-35ee-45d2-9503-23468150b72f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.081802] env[68638]: DEBUG nova.compute.manager [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.082168] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.083039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.083840] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc0353e-cb45-4866-b1b9-8e2668a01b17 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.094068] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.094392] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9a59bb0-a080-4c50-8926-89e577b6a0c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.103660] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 750.103660] env[68638]: value = "task-2833466" [ 750.103660] env[68638]: _type = "Task" [ 750.103660] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.113093] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833466, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.198688] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ec48885-e51f-4735-9896-5722887670c2 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.818s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.199607] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.116s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.199607] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.199803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.200091] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.202022] env[68638]: INFO nova.compute.manager [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Terminating instance [ 750.372992] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a05dc1-ce84-7d0f-9572-8002d1d9bc54, 'name': SearchDatastore_Task, 'duration_secs': 0.024421} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.374356] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9d40db9-b5cc-42c7-ba38-e32e66d85852 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.381069] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 750.381069] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52300c96-f938-8c2e-021d-c8ac7b50d578" [ 750.381069] env[68638]: _type = "Task" [ 750.381069] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.384396] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.384636] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.384871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.385120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.385322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.387779] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.390347] env[68638]: INFO nova.compute.manager [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Terminating instance [ 750.397804] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52300c96-f938-8c2e-021d-c8ac7b50d578, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.419815] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.508199] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644536} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.508199] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. [ 750.508199] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaee5b41-e823-40e5-a18a-4a918f331763 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.528143] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.265s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.528667] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 750.539304] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.543224] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.428s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.543475] env[68638]: DEBUG nova.objects.instance [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lazy-loading 'resources' on Instance uuid ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.544988] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bc12a96-ac77-4766-94c7-d8f9ae4ceae6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.568429] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 750.568429] env[68638]: value = "task-2833468" [ 750.568429] env[68638]: _type = "Task" [ 750.568429] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.579356] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833468, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.600950] env[68638]: DEBUG nova.network.neutron [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Updating instance_info_cache with network_info: [{"id": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "address": "fa:16:3e:a5:07:ee", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84b052e-af", "ovs_interfaceid": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.616212] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833466, 'name': PowerOffVM_Task, 'duration_secs': 0.292077} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.616808] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.617158] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.617306] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e9282344-5274-4c6c-8dd8-725e600958be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.649075] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.649408] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.649682] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Deleting the datastore file [datastore1] 06a1a44f-35ee-45d2-9503-23468150b72f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.649929] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2262e14-62a6-4e6d-a993-6dbbfdc7b18c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.658975] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for the task: (returnval){ [ 750.658975] env[68638]: value = "task-2833470" [ 750.658975] env[68638]: _type = "Task" [ 750.658975] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.667940] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833470, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.702461] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 750.705814] env[68638]: DEBUG nova.compute.manager [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.706065] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.707253] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437ea3a0-f63c-44ea-b8dc-6092b8c025be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.716387] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.716739] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82be2eef-2731-49d7-99ca-e0569fcef22e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.728496] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 750.728496] env[68638]: value = "task-2833471" [ 750.728496] env[68638]: _type = "Task" [ 750.728496] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.741056] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.897796] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52300c96-f938-8c2e-021d-c8ac7b50d578, 'name': SearchDatastore_Task, 'duration_secs': 0.015999} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.898128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.898482] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 2450602a-fde7-4a65-b7a2-be4195077758/2450602a-fde7-4a65-b7a2-be4195077758.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.899369] env[68638]: DEBUG nova.compute.manager [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.899623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.899913] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a251203-fd2c-4218-8bc3-c9da72c3805f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.902998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d21693-09f2-45e0-9b31-5d4903c34211 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.914188] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.915945] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b841332d-16f1-4ab5-887c-f8887840be99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.918128] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 750.918128] env[68638]: value = "task-2833472" [ 750.918128] env[68638]: _type = "Task" [ 750.918128] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.925314] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 750.925314] env[68638]: value = "task-2833473" [ 750.925314] env[68638]: _type = "Task" [ 750.925314] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.933296] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.948365] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.044696] env[68638]: DEBUG nova.compute.utils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 751.046317] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 751.046498] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 751.080975] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833468, 'name': ReconfigVM_Task, 'duration_secs': 0.388167} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.081502] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Reconfigured VM instance instance-0000002a to attach disk [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 751.082210] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055f047e-6964-44a1-bf28-22fb9c170227 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.105911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Releasing lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.106281] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Instance network_info: |[{"id": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "address": "fa:16:3e:a5:07:ee", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84b052e-af", "ovs_interfaceid": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.114489] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:07:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e84b052e-af7e-4eb2-8c0c-08e6c66a2707', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.122158] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c 
tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Creating folder: Project (593ae658cd2c4766a09753c62f02c0c3). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.122488] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ccdd82d-107e-49cd-b7da-67ade12087a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.134407] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12815c7c-84a3-4b9e-ac95-d476451d88ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.144890] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 751.144890] env[68638]: value = "task-2833474" [ 751.144890] env[68638]: _type = "Task" [ 751.144890] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.153152] env[68638]: DEBUG nova.policy [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34d3f08ca4c44eecb3238404c3728f0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e92752f6508d4e0eae7e29247444a38f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 751.157653] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Created folder: Project (593ae658cd2c4766a09753c62f02c0c3) in parent group-v569734. [ 751.157752] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Creating folder: Instances. Parent ref: group-v569871. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.158385] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34560f92-6cc7-4719-a5cc-a9b7edeb0a57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.168206] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833474, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.179422] env[68638]: DEBUG oslo_vmware.api [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Task: {'id': task-2833470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172173} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.179422] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.179422] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.179825] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.179825] env[68638]: INFO nova.compute.manager [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 751.180187] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.184524] env[68638]: DEBUG nova.compute.manager [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.184524] env[68638]: DEBUG nova.network.neutron [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.184524] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Created folder: Instances in parent group-v569871. [ 751.184524] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.184792] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.185203] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dda3bc8e-c1d3-4aa9-b6f2-e1caf2c24969 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.204483] env[68638]: DEBUG nova.network.neutron [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.220652] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.220652] env[68638]: value = "task-2833477" [ 751.220652] env[68638]: _type = "Task" [ 751.220652] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.234060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.242333] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833477, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.244936] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833471, 'name': PowerOffVM_Task, 'duration_secs': 0.277137} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.247020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.247020] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.247020] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a48384b-52be-455e-b074-36c4aceb9701 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.332672] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.332956] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.333169] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleting the datastore file [datastore1] 94a33fcd-69b6-443b-9c86-5129e30b5b0d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.333443] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d694cdcc-1d14-41ca-8563-610c2d213fff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.341921] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 751.341921] env[68638]: value = "task-2833479" [ 751.341921] env[68638]: _type = "Task" [ 751.341921] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.361314] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.420935] env[68638]: DEBUG nova.compute.manager [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Received event network-changed-e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.420935] env[68638]: DEBUG nova.compute.manager [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Refreshing instance network info cache due to event network-changed-e84b052e-af7e-4eb2-8c0c-08e6c66a2707. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 751.420935] env[68638]: DEBUG oslo_concurrency.lockutils [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] Acquiring lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.420935] env[68638]: DEBUG oslo_concurrency.lockutils [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] Acquired lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.421553] env[68638]: DEBUG nova.network.neutron [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Refreshing network info cache for port e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.442519] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833473, 'name': PowerOffVM_Task, 'duration_secs': 0.243875} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.448515] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.448705] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.449371] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833472, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.450037] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06e14284-6215-4939-9c29-24a0e7ae42c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.526244] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.526495] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.526674] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleting the datastore file [datastore2] 8992f062-c28f-4ac8-8d0d-0c51c3784e88 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.526973] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39af456c-2499-4318-9c07-1d1db3d3bb53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.536697] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for the task: (returnval){ [ 751.536697] env[68638]: value = "task-2833481" [ 751.536697] env[68638]: _type = "Task" [ 751.536697] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.548451] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.549258] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 751.628061] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Successfully created port: 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.660630] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833474, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.712306] env[68638]: DEBUG nova.network.neutron [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.730642] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833477, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.840536] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3561ce-719d-4041-a9ca-ce803bf37f53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.852179] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce9d599-b4ed-41f0-b9a9-05014ff89a6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.859043] env[68638]: DEBUG oslo_vmware.api [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348378} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.859987] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.860253] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.860403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.860581] env[68638]: INFO nova.compute.manager [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 751.860820] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.861023] env[68638]: DEBUG nova.compute.manager [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.861122] env[68638]: DEBUG nova.network.neutron [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.892039] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a3a19d-961a-495d-a73d-741916829894 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.900362] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382b65bf-7ed1-459f-bc58-38d9f5e3e1f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.914323] env[68638]: DEBUG nova.compute.provider_tree [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.930332] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599732} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.930575] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 2450602a-fde7-4a65-b7a2-be4195077758/2450602a-fde7-4a65-b7a2-be4195077758.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.930792] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.931401] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e97e40c5-084e-4936-96c0-e7ca8938179d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.939509] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 751.939509] env[68638]: value = "task-2833482" [ 751.939509] env[68638]: _type = "Task" [ 751.939509] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.949482] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.047876] env[68638]: DEBUG oslo_vmware.api [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Task: {'id': task-2833481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237781} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.048169] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.048354] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.048854] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.048854] env[68638]: INFO nova.compute.manager [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Took 1.15 seconds to destroy the instance on the hypervisor. [ 752.048972] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.049119] env[68638]: DEBUG nova.compute.manager [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.049214] env[68638]: DEBUG nova.network.neutron [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.157849] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833474, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.216806] env[68638]: INFO nova.compute.manager [-] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Took 1.03 seconds to deallocate network for instance. [ 752.233497] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833477, 'name': CreateVM_Task, 'duration_secs': 0.534056} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.233665] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.234771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.234934] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.235261] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 752.235582] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-174aa4a0-2288-40be-a93f-c4a3a352dd81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.241807] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 752.241807] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e1f2c1-37bb-d0fe-7122-5a74f856262f" [ 752.241807] env[68638]: _type = "Task" [ 752.241807] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.253277] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e1f2c1-37bb-d0fe-7122-5a74f856262f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.321653] env[68638]: DEBUG nova.network.neutron [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Updated VIF entry in instance network info cache for port e84b052e-af7e-4eb2-8c0c-08e6c66a2707. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.321929] env[68638]: DEBUG nova.network.neutron [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Updating instance_info_cache with network_info: [{"id": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "address": "fa:16:3e:a5:07:ee", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.227", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84b052e-af", "ovs_interfaceid": "e84b052e-af7e-4eb2-8c0c-08e6c66a2707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.418078] env[68638]: DEBUG nova.scheduler.client.report [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 752.449684] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068058} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.449958] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.450771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94f07d4-b52d-47d9-8f87-8543cbbbe1b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.475891] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 2450602a-fde7-4a65-b7a2-be4195077758/2450602a-fde7-4a65-b7a2-be4195077758.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.476198] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f8ff0ed-53f6-4f2a-a6af-4d46b1fcfeb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.498478] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 752.498478] env[68638]: value = "task-2833483" [ 752.498478] env[68638]: _type = "Task" [ 752.498478] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.507576] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.562024] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 752.591099] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:29:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='a203cce6-fe96-4a10-ad18-80d29521d33f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-493947233',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=<?>,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-07T02:25:56Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 752.591099] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.591099] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.591099] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.591469] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.591469] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 752.591862] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 752.591862] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 752.591966] env[68638]: DEBUG 
nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 752.592164] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 752.592339] env[68638]: DEBUG nova.virt.hardware [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 752.593535] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f7eedb-31e4-47f5-850b-7eca7c2b9ae5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.604808] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0aec8a-a093-4ade-9f76-cd534a4f1601 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.657037] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833474, 'name': ReconfigVM_Task, 'duration_secs': 1.218757} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.657820] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.657820] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c42a216a-2473-4c4c-b9ca-44e59fd6a9d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.665527] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 752.665527] env[68638]: value = "task-2833484" [ 752.665527] env[68638]: _type = "Task" [ 752.665527] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.674441] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833484, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.729387] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.754582] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e1f2c1-37bb-d0fe-7122-5a74f856262f, 'name': SearchDatastore_Task, 'duration_secs': 0.011188} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.754911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.754959] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.755255] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.755408] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.755582] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.755863] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63a17431-c100-4095-b6d7-bec50fe79252 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.765952] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.766148] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.766906] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-039e0270-a81c-4ac2-a1ba-dc655d4c32cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.773387] env[68638]: DEBUG nova.compute.manager [req-ada765f2-be59-45c0-afb5-e43cea87763d req-ae4592ef-6b05-4545-9df2-40342ce858fa service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Received event network-vif-deleted-ababd3bc-a199-4001-b965-7ba88550ead8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 752.773491] env[68638]: INFO nova.compute.manager [req-ada765f2-be59-45c0-afb5-e43cea87763d req-ae4592ef-6b05-4545-9df2-40342ce858fa service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Neutron deleted interface ababd3bc-a199-4001-b965-7ba88550ead8; detaching it from the instance and deleting it from the info cache [ 752.774282] env[68638]: DEBUG nova.network.neutron [req-ada765f2-be59-45c0-afb5-e43cea87763d req-ae4592ef-6b05-4545-9df2-40342ce858fa service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.777006] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 752.777006] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52826b0f-a14d-23b7-7ca6-1eca2378d501" [ 752.777006] env[68638]: _type = "Task" [ 752.777006] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.788842] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52826b0f-a14d-23b7-7ca6-1eca2378d501, 'name': SearchDatastore_Task, 'duration_secs': 0.010775} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.789676] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b666b325-f7cb-4a44-9cac-2a729e7c0494 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.799187] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 752.799187] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527c924d-7e1d-35fd-dcbc-7f52f70c63cd" [ 752.799187] env[68638]: _type = "Task" [ 752.799187] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.808181] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c924d-7e1d-35fd-dcbc-7f52f70c63cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.825183] env[68638]: DEBUG oslo_concurrency.lockutils [req-50c25ff7-6324-46c8-b952-607726ad1a9c req-42e9bff5-2dff-4a7e-b98b-41f65dab7b8c service nova] Releasing lock "refresh_cache-a3b06e32-2670-4381-bb91-4597bfcabaa6" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.922965] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.380s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.926115] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.751s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.926115] env[68638]: DEBUG nova.objects.instance [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'resources' on Instance uuid 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 752.948602] env[68638]: INFO nova.scheduler.client.report [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Deleted allocations for instance ebd7dd7a-2565-45da-bf7a-b8047c54ebe4 [ 753.011926] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833483, 'name': ReconfigVM_Task, 'duration_secs': 0.433102} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.013045] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 2450602a-fde7-4a65-b7a2-be4195077758/2450602a-fde7-4a65-b7a2-be4195077758.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.013045] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9606b57d-d209-4fcb-a6fb-1caec9d3c685 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.020449] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 753.020449] env[68638]: value = "task-2833485" [ 753.020449] env[68638]: _type = "Task" [ 753.020449] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.029798] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833485, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.072466] env[68638]: DEBUG nova.network.neutron [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.074178] env[68638]: DEBUG nova.network.neutron [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.176134] env[68638]: DEBUG oslo_vmware.api [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833484, 'name': PowerOnVM_Task, 'duration_secs': 0.421354} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.176426] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.179262] env[68638]: DEBUG nova.compute.manager [None req-02048efb-5af5-4e6d-a6f6-c765ad4239a4 tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.180067] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0764d0-b103-4619-a7cb-3679e1f0afa3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.278831] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8affe4bf-4cd6-465d-837f-6c1f81daa054 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.293048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b697793-0f0f-40fc-ac45-dc43b7b32066 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.317800] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c924d-7e1d-35fd-dcbc-7f52f70c63cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.318134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.318409] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a3b06e32-2670-4381-bb91-4597bfcabaa6/a3b06e32-2670-4381-bb91-4597bfcabaa6.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.318681] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dd1aaf0-098f-43a0-b1c9-88bac3b18244 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.335893] env[68638]: DEBUG nova.compute.manager [req-ada765f2-be59-45c0-afb5-e43cea87763d req-ae4592ef-6b05-4545-9df2-40342ce858fa service nova] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Detach interface failed, port_id=ababd3bc-a199-4001-b965-7ba88550ead8, reason: Instance 8992f062-c28f-4ac8-8d0d-0c51c3784e88 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 753.338116] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 753.338116] env[68638]: value = "task-2833486" [ 753.338116] env[68638]: _type = "Task" [ 753.338116] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.347613] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833486, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.456351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-174d7efa-9208-4734-86ce-7b1068eae577 tempest-AttachInterfacesV270Test-419941254 tempest-AttachInterfacesV270Test-419941254-project-member] Lock "ebd7dd7a-2565-45da-bf7a-b8047c54ebe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.813s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.527191] env[68638]: DEBUG nova.compute.manager [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Received event network-vif-plugged-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 753.527399] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] Acquiring lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.527604] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.527962] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.528363] env[68638]: DEBUG nova.compute.manager [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] No waiting events found dispatching network-vif-plugged-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.528444] env[68638]: WARNING nova.compute.manager [req-4e59b750-6ea2-4e96-bc00-db8b3b93d7ed req-0c938f53-f7de-4dbd-9281-8d30e8c58e19 service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Received unexpected event network-vif-plugged-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce for instance with vm_state building and task_state spawning. [ 753.537844] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833485, 'name': Rename_Task, 'duration_secs': 0.31193} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.538425] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.538681] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3894cf82-2188-4391-8a7f-3212e26df6bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.548218] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 753.548218] env[68638]: value = "task-2833487" [ 753.548218] env[68638]: _type = "Task" [ 753.548218] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.567645] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833487, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.578686] env[68638]: INFO nova.compute.manager [-] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Took 1.53 seconds to deallocate network for instance. [ 753.579106] env[68638]: INFO nova.compute.manager [-] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Took 1.72 seconds to deallocate network for instance. [ 753.707143] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Successfully updated port: 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.855416] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505262} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.855751] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a3b06e32-2670-4381-bb91-4597bfcabaa6/a3b06e32-2670-4381-bb91-4597bfcabaa6.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.856055] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.859692] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c902b449-0f41-4d13-8e37-ecc57c4f5b76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.881303] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 753.881303] env[68638]: value = "task-2833488" [ 753.881303] env[68638]: _type = "Task" [ 753.881303] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.891171] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833488, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.057996] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833487, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.091047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.098676] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.138725] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b74c0a-32b2-45ed-9e7f-20222fae5817 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.147473] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4ed43b-f246-4a30-ab8e-77d0e096311f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.183326] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca53d12b-5556-426e-be72-90ace03ae4e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.201667] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679866a6-0b0c-4f77-9b14-ce3f6223edaa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.218594] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.218751] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.218904] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.220265] env[68638]: DEBUG nova.compute.provider_tree [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.392853] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.393702} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.393501] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.394631] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c807bf-ce5e-4b2d-85d4-e43e53ce54c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.418376] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] a3b06e32-2670-4381-bb91-4597bfcabaa6/a3b06e32-2670-4381-bb91-4597bfcabaa6.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.418696] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cf51be3-045d-472f-989a-9b4113d5fb25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.456721] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 754.456721] env[68638]: value = "task-2833489" [ 754.456721] env[68638]: _type = "Task" [ 754.456721] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.466263] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833489, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.566480] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833487, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.771423] env[68638]: DEBUG nova.scheduler.client.report [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 754.771423] env[68638]: DEBUG nova.compute.provider_tree [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 73 to 74 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 754.771552] env[68638]: DEBUG nova.compute.provider_tree [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.775826] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.977733] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833489, 'name': ReconfigVM_Task, 'duration_secs': 0.322809} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.978330] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Reconfigured VM instance instance-0000002e to attach disk [datastore2] a3b06e32-2670-4381-bb91-4597bfcabaa6/a3b06e32-2670-4381-bb91-4597bfcabaa6.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.978714] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96c2f1db-373e-4193-9ebd-e9eed8b20cfd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.989632] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 754.989632] env[68638]: value = "task-2833491" [ 754.989632] env[68638]: _type = "Task" [ 754.989632] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.998631] env[68638]: DEBUG nova.compute.manager [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Received event network-vif-deleted-906771db-1cab-44ee-b119-40a19f3597df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 754.998839] env[68638]: DEBUG nova.compute.manager [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-changed-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 754.998999] env[68638]: DEBUG nova.compute.manager [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing instance network info cache due to event network-changed-8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 754.999654] env[68638]: DEBUG oslo_concurrency.lockutils [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.999809] env[68638]: DEBUG oslo_concurrency.lockutils [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.999974] env[68638]: DEBUG nova.network.neutron [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.007437] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833491, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.060584] env[68638]: DEBUG oslo_vmware.api [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833487, 'name': PowerOnVM_Task, 'duration_secs': 1.052139} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.060858] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.061071] env[68638]: INFO nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Took 10.60 seconds to spawn the instance on the hypervisor. 
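The PowerOnVM_Task and Rename_Task entries above, with their repeated "progress is N%" lines, come from oslo.vmware's task polling loop (wait_for_task / _poll_task in oslo_vmware/api.py). The sketch below is a minimal, illustrative reimplementation of that loop, not the library code itself: get_task_info is a hypothetical stand-in for the PropertyCollector read oslo.vmware performs, while the TaskInfo states ('queued', 'running', 'success', 'error') are the ones vSphere actually reports.

    import time
    from types import SimpleNamespace

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vSphere task until it leaves the 'queued'/'running' states.

        get_task_info is a hypothetical callable standing in for the
        PropertyCollector read that oslo.vmware performs; it returns an
        object with .state, .progress and .error, like vSphere's TaskInfo.
        """
        while True:
            info = get_task_info()
            if info.state in ('queued', 'running'):
                # This is what produces the "PowerOnVM_Task} progress is 0%."
                # style DEBUG lines seen above.
                print("progress is %s%%" % (info.progress or 0))
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                return info
            raise RuntimeError("task failed: %s" % info.error)

    # Tiny demo with a fake task that succeeds on the second poll.
    _states = iter([SimpleNamespace(state='running', progress=0, error=None),
                    SimpleNamespace(state='success', progress=100, error=None)])
    wait_for_task(lambda: next(_states), poll_interval=0)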
[ 755.061253] env[68638]: DEBUG nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.062174] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63fddd3-287f-450f-bd6a-4ad9d35fba1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.083622] env[68638]: DEBUG nova.network.neutron [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.279029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.353s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.286846] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.666s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.287165] env[68638]: DEBUG nova.objects.instance [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lazy-loading 'resources' on Instance uuid f43dae1e-3442-450a-b9e8-3884504a2b38 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 755.315638] env[68638]: INFO nova.scheduler.client.report [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 
tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted allocations for instance 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248 [ 755.506929] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833491, 'name': Rename_Task, 'duration_secs': 0.154429} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.507571] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.508281] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09cf55b1-191f-4fb0-a6d8-aec4119a499b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.526040] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 755.526040] env[68638]: value = "task-2833492" [ 755.526040] env[68638]: _type = "Task" [ 755.526040] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.536728] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.582009] env[68638]: INFO nova.compute.manager [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Took 46.02 seconds to build instance. 
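The Acquiring lock / "acquired ... waited Ns" / "released ... held Ns" messages around "compute_resources" are emitted by oslo.concurrency's lockutils when Nova's resource tracker serializes usage updates. A minimal sketch of how such a synchronized section is typically declared, assuming only the public lockutils.synchronized and lockutils.lock APIs (the function body is hypothetical):

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCES = "compute_resources"

    @lockutils.synchronized(COMPUTE_RESOURCES)
    def update_usage():
        # Hypothetical critical section: while this body runs, concurrent
        # callers block, and lockutils logs how long each one waited for
        # and held the lock (the "waited 36.751s" / "held 2.380s" figures
        # seen above).
        pass

    # The same named lock can also be taken explicitly:
    with lockutils.lock(COMPUTE_RESOURCES):
        pass  # critical section

    update_usage()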
[ 755.587303] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.587483] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Instance network_info: |[{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.587890] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:6e:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.598081] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.602529] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.602529] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71e3bcfd-8a10-4ed0-afc7-0e1093ffbefe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.624812] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.624812] env[68638]: value = "task-2833493" [ 755.624812] env[68638]: _type = "Task" [ 755.624812] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.636959] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833493, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.832891] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df7facdb-e4a4-4ecd-992f-32e02eca3e30 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "8fe9ba7e-021c-4b0f-a9ba-df7a6b753248" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.207s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.966290] env[68638]: DEBUG nova.network.neutron [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updated VIF entry in instance network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.967679] env[68638]: DEBUG nova.network.neutron [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.034220] env[68638]: DEBUG nova.compute.manager [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Received event network-changed-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 756.034581] env[68638]: DEBUG nova.compute.manager [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Refreshing instance network info cache due to event network-changed-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 756.034654] env[68638]: DEBUG oslo_concurrency.lockutils [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.034806] env[68638]: DEBUG oslo_concurrency.lockutils [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.034998] env[68638]: DEBUG nova.network.neutron [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Refreshing network info cache for port 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.044174] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833492, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.090389] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c36118b5-f21f-41b2-8c20-0a13d8a6d339 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.151s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.139274] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833493, 'name': CreateVM_Task, 'duration_secs': 0.399168} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.139274] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.139644] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.140024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.140505] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.143150] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e149e319-f23a-44c0-8789-e2b5a1c175fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.152683] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 756.152683] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520d7184-9162-351e-8f44-835f781e9730" [ 756.152683] env[68638]: _type = "Task" [ 756.152683] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.163686] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520d7184-9162-351e-8f44-835f781e9730, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.399673] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d20c388-b068-4e92-bc8a-d93d8343452e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.407468] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de542d2b-5746-4906-955b-df3e0b88f8f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.440613] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d23f973-fafb-47b4-a4e4-14178a1a844f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.457555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9164b11-41df-41a0-b3c7-7a19e5508ca1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.472985] env[68638]: DEBUG oslo_concurrency.lockutils [req-574cc252-2810-40e5-8f42-83eefb1e467b req-ac539625-5406-488f-9055-899c68747f25 service nova] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.473589] env[68638]: DEBUG nova.compute.provider_tree [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.535195] env[68638]: DEBUG oslo_vmware.api [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833492, 'name': PowerOnVM_Task, 'duration_secs': 0.546838} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.535488] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.535690] env[68638]: INFO nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Took 8.24 seconds to spawn the instance on the hypervisor. 
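The inventory dicts reported to Placement above (VCPU, MEMORY_MB, DISK_GB, each with total, reserved and allocation_ratio) bound what the scheduler can allocate: roughly (total - reserved) * allocation_ratio per resource class. A small worked example using the exact figures from this log:

    # Inventory as reported for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                      'min_unit': 1, 'max_unit': 16,    'step_size': 1},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                      'min_unit': 1, 'max_unit': 168,   'step_size': 1},
    }

    for rc, inv in inventory.items():
        # Approximate schedulable capacity per resource class.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0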
[ 756.535868] env[68638]: DEBUG nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.536668] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d281bf-5477-4baa-8a18-d9a3ffade5bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.593369] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 756.665742] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520d7184-9162-351e-8f44-835f781e9730, 'name': SearchDatastore_Task, 'duration_secs': 0.029374} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.666200] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.666538] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.666809] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.666975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.667166] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.667460] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1b1e1246-e9ff-414c-98b3-af055b991b70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.679516] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.679738] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.680558] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14a2cd29-fa10-4b52-8f20-f24e9db5bed8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.702216] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 756.702216] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52052383-b360-b6e1-65f7-4e5ea6cf4206" [ 756.702216] env[68638]: _type = "Task" [ 756.702216] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.716671] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52052383-b360-b6e1-65f7-4e5ea6cf4206, 'name': SearchDatastore_Task, 'duration_secs': 0.011907} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.719566] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbd1c225-2c52-4c3f-87df-4b876952bf91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.729014] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 756.729014] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526f6411-c3cd-98d0-5531-6176ad7675d2" [ 756.729014] env[68638]: _type = "Task" [ 756.729014] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.738955] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526f6411-c3cd-98d0-5531-6176ad7675d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.840898] env[68638]: DEBUG nova.network.neutron [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updated VIF entry in instance network info cache for port 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.840898] env[68638]: DEBUG nova.network.neutron [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.977490] env[68638]: DEBUG nova.scheduler.client.report [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.058426] env[68638]: INFO nova.compute.manager [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Took 45.16 seconds to build instance. 
[ 757.114696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.198366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "1946baab-bb48-4138-8db6-1f530e432c3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.198634] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.198853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.199052] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.199264] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.201699] env[68638]: INFO nova.compute.manager [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Terminating instance [ 757.243815] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526f6411-c3cd-98d0-5531-6176ad7675d2, 'name': SearchDatastore_Task, 'duration_secs': 0.013674} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.244382] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.244778] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.245450] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7f5a306-9957-4150-b6cc-29de772cd2ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.254020] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 757.254020] env[68638]: value = "task-2833495" [ 757.254020] env[68638]: _type = "Task" [ 757.254020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.267463] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833495, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.343384] env[68638]: DEBUG oslo_concurrency.lockutils [req-6c0eeabb-594f-4b79-a8b9-634b4bd419f9 req-77191b86-774f-43d3-8b29-08183e585d6c service nova] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.468506] env[68638]: DEBUG nova.compute.manager [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-changed-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 757.468639] env[68638]: DEBUG nova.compute.manager [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing instance network info cache due to event network-changed-8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 757.469379] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.469379] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.469379] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.482608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.485622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.816s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.488125] env[68638]: INFO nova.compute.claims [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.513438] env[68638]: INFO nova.scheduler.client.report [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Deleted allocations for instance f43dae1e-3442-450a-b9e8-3884504a2b38 [ 757.560565] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acae99b-cea8-4e22-9fd8-cf45ef81337c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.418s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.706611] env[68638]: DEBUG nova.compute.manager [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 757.706611] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 757.708123] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638bd6c7-32a8-4345-9dd1-0a919c295da2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.718079] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 757.718501] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e691f8e9-5c3c-4560-ad1a-f232f699b127 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.733300] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 757.733300] env[68638]: value = "task-2833496" [ 757.733300] env[68638]: _type = "Task" [ 757.733300] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.744907] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.766521] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833495, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.020731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3b9ae49-7702-4a4b-a800-40e512758984 tempest-ServersListShow2100Test-797039565 tempest-ServersListShow2100Test-797039565-project-member] Lock "f43dae1e-3442-450a-b9e8-3884504a2b38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.652s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.066083] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.227307] env[68638]: DEBUG nova.compute.manager [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.227307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3e7fed-7453-42d8-aeca-890d3b1b7a12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.247607] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833496, 'name': PowerOffVM_Task, 'duration_secs': 0.440497} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.247667] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 758.247981] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 758.248272] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35d77919-b2d7-4064-b760-976b24c3b606 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.266762] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833495, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.332642] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updated VIF entry in instance network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.333099] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.475666] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 758.475941] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 758.476162] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleting the datastore file [datastore1] 1946baab-bb48-4138-8db6-1f530e432c3d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.476447] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ac8b5e8-b180-4260-b76d-23ecd65540cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.486155] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for the task: (returnval){ [ 758.486155] env[68638]: value = "task-2833498" [ 758.486155] env[68638]: _type = "Task" [ 758.486155] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.496041] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.591627] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.743835] env[68638]: INFO nova.compute.manager [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] instance snapshotting [ 758.749391] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8e3e57-6aae-4b0c-9ea6-ab74530296c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.776795] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2329d1-a7ae-47e6-a1f6-8c2052f9f713 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.790675] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833495, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.837111] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.837523] env[68638]: DEBUG nova.compute.manager [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-changed-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 758.837718] env[68638]: DEBUG nova.compute.manager [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing instance network info cache due to event network-changed-8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 758.837955] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.838118] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.838286] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.999797] env[68638]: DEBUG oslo_vmware.api [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Task: {'id': task-2833498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328537} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.002789] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.003200] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 759.007014] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.007014] env[68638]: INFO nova.compute.manager [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Took 1.30 seconds to destroy the instance on the hypervisor. [ 759.007014] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 759.007014] env[68638]: DEBUG nova.compute.manager [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 759.007014] env[68638]: DEBUG nova.network.neutron [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 759.008684] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.008938] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.009095] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.009267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.009424] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.011658] env[68638]: INFO nova.compute.manager [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Terminating instance [ 759.163184] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcaf65c-cfa4-4d3d-be8a-cc4ceee87f4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.171963] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49798a87-07bc-4965-b836-427c2a7371f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.206087] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d58ae09-2888-44e8-914d-7e6166488c3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.215310] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98e1280-8534-49b6-bdbb-cecabba231b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.229862] env[68638]: DEBUG nova.compute.provider_tree [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.282014] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833495, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.60838} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.282854] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.282854] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.282854] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5732113-c9e8-47c3-b51e-a947ef706e78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.290973] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 759.290973] env[68638]: value = "task-2833500" [ 759.290973] env[68638]: _type = "Task" [ 759.290973] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.303562] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 759.303562] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.303562] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f6912555-f574-4aa7-b448-f920736b20be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.311734] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 759.311734] env[68638]: value = "task-2833501" [ 759.311734] env[68638]: _type = "Task" [ 759.311734] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.322306] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833501, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.526220] env[68638]: DEBUG nova.compute.manager [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 759.526220] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.526220] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6677af3-247b-468e-b467-139961f6f782 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.541157] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.541157] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92e6d1d8-461b-468a-a0c2-021b510fe04c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.548604] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 759.548604] env[68638]: value = "task-2833502" [ 759.548604] env[68638]: _type = "Task" [ 759.548604] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.558138] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.611592] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updated VIF entry in instance network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.612166] env[68638]: DEBUG nova.network.neutron [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.735253] env[68638]: DEBUG nova.scheduler.client.report [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.803335] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205276} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.803606] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.804473] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32451f37-4ebd-412a-a34a-2174d101adb4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.830751] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.833654] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f53d3a3-2c47-4479-9f8a-11220ed1632a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.852292] env[68638]: DEBUG nova.compute.manager [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-changed-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 759.852461] env[68638]: DEBUG nova.compute.manager [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing instance network info cache due to event network-changed-8407d492-d594-4996-8547-bfe5c27586e3. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 759.852710] env[68638]: DEBUG oslo_concurrency.lockutils [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] Acquiring lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.858794] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833501, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.860342] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 759.860342] env[68638]: value = "task-2833503" [ 759.860342] env[68638]: _type = "Task" [ 759.860342] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.872028] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833503, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.978307] env[68638]: DEBUG nova.compute.manager [req-25520607-deee-4c40-abb4-90e72ef52aa5 req-528e8e48-ed5c-41e4-80cd-f04c4755a61d service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Received event network-vif-deleted-c94367eb-4dac-4137-92b7-00d32ad0be7c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 759.978683] env[68638]: INFO nova.compute.manager [req-25520607-deee-4c40-abb4-90e72ef52aa5 req-528e8e48-ed5c-41e4-80cd-f04c4755a61d service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Neutron deleted interface c94367eb-4dac-4137-92b7-00d32ad0be7c; detaching it from the instance and deleting it from the info cache [ 759.978787] env[68638]: DEBUG nova.network.neutron [req-25520607-deee-4c40-abb4-90e72ef52aa5 req-528e8e48-ed5c-41e4-80cd-f04c4755a61d service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.063509] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833502, 'name': PowerOffVM_Task, 'duration_secs': 0.307165} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.064497] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.064497] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.064497] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79df477f-7335-47e9-800d-35dc6b448338 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.089621] env[68638]: DEBUG nova.network.neutron [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.114899] env[68638]: DEBUG oslo_concurrency.lockutils [req-644a7e1f-a7bd-45de-aaa1-139523bb149d req-41a2b9d1-8111-46ca-ad3b-662a9ef29153 service nova] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.115123] env[68638]: DEBUG oslo_concurrency.lockutils [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] Acquired lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.115315] env[68638]: DEBUG nova.network.neutron [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Refreshing network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.137364] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.137659] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.137877] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Deleting the datastore file [datastore1] ac0141c2-aef6-4edf-913a-d4a41b502c10 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.138220] env[68638]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e073cae-719c-4545-b12f-4c6a075fb14c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.148187] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for the task: (returnval){ [ 760.148187] env[68638]: value = "task-2833505" [ 760.148187] env[68638]: _type = "Task" [ 760.148187] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.159559] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.241167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.241816] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.249022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.150s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.249022] env[68638]: DEBUG nova.objects.instance [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lazy-loading 'resources' on Instance uuid 5a28d684-584b-4e13-9910-183119ce5d37 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.042214] env[68638]: INFO nova.compute.manager [-] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Took 2.04 seconds to deallocate network for instance. 
[ 761.042869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.043128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.043357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.044067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.044067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.046398] env[68638]: DEBUG nova.compute.utils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.053185] env[68638]: INFO nova.compute.manager [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Terminating instance [ 761.061106] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9819104f-bd32-4551-bd6c-63268153a819 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.065782] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.065968] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.073026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.073026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.073026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.073026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.073259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.077659] env[68638]: INFO nova.compute.manager [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Terminating instance [ 761.084023] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833501, 'name': CreateSnapshot_Task, 'duration_secs': 1.177477} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.091718] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 761.097020] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833503, 'name': ReconfigVM_Task, 'duration_secs': 0.467381} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.097020] env[68638]: DEBUG oslo_vmware.api [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Task: {'id': task-2833505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250846} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.097020] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa05541f-2306-4c90-a951-d93ec242ba22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.099097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb4fd2e-550c-4fd9-a7b0-4d85971c9256 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.110047] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfigured VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 761.110897] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 761.111029] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 761.111224] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 761.111435] env[68638]: INFO nova.compute.manager [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 
tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Took 1.59 seconds to destroy the instance on the hypervisor. [ 761.111672] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.115251] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d9ee8ee-df1d-42f8-a861-c0a85d4178c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.116883] env[68638]: DEBUG nova.compute.manager [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 761.116977] env[68638]: DEBUG nova.network.neutron [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 761.130647] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 761.130647] env[68638]: value = "task-2833506" [ 761.130647] env[68638]: _type = "Task" [ 761.130647] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.149106] env[68638]: DEBUG nova.compute.manager [req-25520607-deee-4c40-abb4-90e72ef52aa5 req-528e8e48-ed5c-41e4-80cd-f04c4755a61d service nova] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Detach interface failed, port_id=c94367eb-4dac-4137-92b7-00d32ad0be7c, reason: Instance 1946baab-bb48-4138-8db6-1f530e432c3d could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 761.153561] env[68638]: DEBUG nova.policy [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b668c4c9d8744b5b068ee3cbac20d7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '023c6f361e7c486a9a75b69ea8cae208', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.164020] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833506, 'name': Rename_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.549778] env[68638]: DEBUG nova.network.neutron [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updated VIF entry in instance network info cache for port 8407d492-d594-4996-8547-bfe5c27586e3. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.550149] env[68638]: DEBUG nova.network.neutron [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [{"id": "8407d492-d594-4996-8547-bfe5c27586e3", "address": "fa:16:3e:0a:8a:54", "network": {"id": "afb24962-d875-4d95-b711-2f9a3b84dddd", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1720924302-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "b63d2e4e9fe24cc1aeb4b1569517ea20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ab9e5e6-9bf8-4a8d-91c8-d22148e3d2ee", "external-id": "nsx-vlan-transportzone-401", "segmentation_id": 401, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8407d492-d5", "ovs_interfaceid": "8407d492-d594-4996-8547-bfe5c27586e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.554290] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 761.577138] env[68638]: DEBUG nova.compute.manager [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.577138] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.577940] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.579086] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd898dd1-350f-4418-9ef4-c51a65b7a4f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.583820] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Successfully created port: 83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.588020] env[68638]: DEBUG nova.compute.manager [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.588204] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.592507] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7943a25-a7e7-4ff3-bf95-e202588ae091 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.599529] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.600883] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04bc5b03-b15b-41fa-9086-1386deae1035 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.605256] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.605516] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aae61069-5fe3-45fd-9b7f-ae34482ad461 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.609188] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 761.609188] env[68638]: value = "task-2833507" [ 761.609188] env[68638]: _type = "Task" [ 761.609188] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.618851] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 761.618851] env[68638]: value = "task-2833508" [ 761.618851] env[68638]: _type = "Task" [ 761.618851] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.629170] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833507, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.642260] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 761.642575] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.645868] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ee70f573-4035-4f58-a673-bd81c3436939 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.660861] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833506, 'name': Rename_Task, 'duration_secs': 0.298109} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.662134] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.662485] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 761.662485] env[68638]: value = "task-2833509" [ 761.662485] env[68638]: _type = "Task" [ 761.662485] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.663093] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca728e78-5748-4430-aa89-82220720ce78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.678870] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 761.678870] env[68638]: value = "task-2833510" [ 761.678870] env[68638]: _type = "Task" [ 761.678870] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.683225] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833509, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.706420] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833510, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.815147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bad9ae-bb1a-4ad0-a74e-e116dfc959e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.825315] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122a1d3c-f544-4447-9146-78d14027987b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.871909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79da5358-beb1-4ad0-8fe8-5e504116ddc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.882267] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f3dbc1-cddb-474f-a03a-726db2d19537 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.898997] env[68638]: DEBUG nova.compute.provider_tree [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.054463] env[68638]: DEBUG oslo_concurrency.lockutils [req-c6273ed7-2a3a-4e1a-8ba2-47cf3ab6725c req-b3e200bd-f2af-4e2e-8328-9c7f5e447917 service nova] Releasing lock "refresh_cache-5294e1b6-f34f-4f91-aa3e-e0276ad982ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.060654] env[68638]: INFO nova.virt.block_device [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Booting with volume a3de97f5-fa56-44b0-81e5-346fb44dddb0 at /dev/sda [ 762.117632] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9402d331-32f8-4240-b319-0c1b360d0355 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.127354] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833507, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.131608] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d70981-303f-4d3e-9199-ffc30d57f27a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.151569] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833508, 'name': PowerOffVM_Task, 'duration_secs': 0.262662} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.151916] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.152747] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.153250] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef030792-1303-4830-8ac9-4642ce3a525e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.189493] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a88ec5ee-6b19-442d-bd31-ad80eccbc6b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.195239] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833509, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.201556] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833510, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.206240] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44c4cfc-64db-48eb-9a2c-b69d8c9d9ecc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.243787] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eedf50f-f488-4b71-8ae1-3a442e1e3734 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.249161] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.249389] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.249593] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Deleting the datastore file [datastore2] a3b06e32-2670-4381-bb91-4597bfcabaa6 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.250879] env[68638]: DEBUG nova.compute.manager [req-7536f52b-5401-4db1-9600-6ad787fe69dc req-9854f75e-69e5-42a7-a9af-e9b0211e998c service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Received event network-vif-deleted-e2ae854b-e47d-4298-8a7e-1a2f6c3e3206 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 762.251064] env[68638]: INFO nova.compute.manager [req-7536f52b-5401-4db1-9600-6ad787fe69dc req-9854f75e-69e5-42a7-a9af-e9b0211e998c service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Neutron deleted interface e2ae854b-e47d-4298-8a7e-1a2f6c3e3206; detaching it from the instance and deleting it from the info cache [ 762.251287] env[68638]: DEBUG nova.network.neutron [req-7536f52b-5401-4db1-9600-6ad787fe69dc req-9854f75e-69e5-42a7-a9af-e9b0211e998c service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.252788] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9897e71c-2d88-4065-ac5b-3d688e0bf03b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.259151] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182a5e06-93c8-4a12-b606-5ddd1e4a4bc9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.264092] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for the task: (returnval){ [ 
762.264092] env[68638]: value = "task-2833512" [ 762.264092] env[68638]: _type = "Task" [ 762.264092] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.276473] env[68638]: DEBUG nova.virt.block_device [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating existing volume attachment record: 04c7372b-a8d0-4b24-882d-8396a76793ec {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 762.283955] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.391383] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.391629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.423518] env[68638]: DEBUG nova.network.neutron [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.425167] env[68638]: ERROR nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [req-df431b0c-5605-4d00-8861-c431b0a25b98] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-df431b0c-5605-4d00-8861-c431b0a25b98"}]} [ 762.450987] env[68638]: DEBUG nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 762.477372] env[68638]: DEBUG nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 762.477623] env[68638]: DEBUG nova.compute.provider_tree [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 762.483932] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.484196] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.494756] env[68638]: DEBUG nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 762.519505] env[68638]: DEBUG nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 
tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 762.625663] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833507, 'name': PowerOffVM_Task, 'duration_secs': 0.572785} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.625968] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.626172] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.626492] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f129423-67f3-40a2-b746-a9c40e72a2b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.678270] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833509, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.695075] env[68638]: DEBUG oslo_vmware.api [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833510, 'name': PowerOnVM_Task, 'duration_secs': 0.836474} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.697881] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.698173] env[68638]: INFO nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Took 10.14 seconds to spawn the instance on the hypervisor. 
[ 762.698362] env[68638]: DEBUG nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.700672] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6eb333-828d-44e2-9105-78e474ed6dff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.703206] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.703388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.703574] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Deleting the datastore file [datastore2] 5294e1b6-f34f-4f91-aa3e-e0276ad982ee {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.703818] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bef9391c-680d-4b7f-b98f-3c784baf4f15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.717749] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for the task: (returnval){ [ 762.717749] env[68638]: value = "task-2833514" [ 762.717749] env[68638]: _type = "Task" [ 762.717749] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.728480] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833514, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.761604] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f33f679c-83b0-4b51-9da1-22d4073f19e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.773949] env[68638]: DEBUG oslo_vmware.api [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Task: {'id': task-2833512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204984} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.777262] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.777469] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 762.777844] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.778056] env[68638]: INFO nova.compute.manager [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Took 1.19 seconds to destroy the instance on the hypervisor. [ 762.778777] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 762.779094] env[68638]: DEBUG nova.compute.manager [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 762.779168] env[68638]: DEBUG nova.network.neutron [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 762.783336] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51cd1b0a-f84e-46e3-b4eb-5e9603d0c701 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.824550] env[68638]: DEBUG nova.compute.manager [req-7536f52b-5401-4db1-9600-6ad787fe69dc req-9854f75e-69e5-42a7-a9af-e9b0211e998c service nova] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Detach interface failed, port_id=e2ae854b-e47d-4298-8a7e-1a2f6c3e3206, reason: Instance ac0141c2-aef6-4edf-913a-d4a41b502c10 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 762.929052] env[68638]: INFO nova.compute.manager [-] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Took 1.81 seconds to deallocate network for instance. 
[ 762.987483] env[68638]: DEBUG nova.compute.utils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.091442] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0c7354-98bb-4956-80f0-6617dded1b26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.105480] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb8c62f-dec1-422d-b41d-bede60201229 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.147724] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8486710f-4b47-4355-b53f-8396cded9bcb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.156844] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1296cf46-5f4b-418f-ba84-dac3f7115519 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.174612] env[68638]: DEBUG nova.compute.provider_tree [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 763.189485] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833509, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.236146] env[68638]: INFO nova.compute.manager [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Took 49.72 seconds to build instance. [ 763.242054] env[68638]: DEBUG oslo_vmware.api [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Task: {'id': task-2833514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322955} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.242388] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.242601] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.242817] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.243056] env[68638]: INFO nova.compute.manager [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Took 1.67 seconds to destroy the instance on the hypervisor. [ 763.243351] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.243643] env[68638]: DEBUG nova.compute.manager [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.244247] env[68638]: DEBUG nova.network.neutron [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 763.296870] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Successfully updated port: 83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.436817] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.491361] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.686706] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833509, 'name': CloneVM_Task, 'duration_secs': 1.703919} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.687067] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Created linked-clone VM from snapshot [ 763.688128] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3235ce9-61a6-488f-a6b7-5b27c94e930a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.696612] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Uploading image 5c06a7dc-0846-43cb-a61a-1bdfd115a5c8 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 763.710979] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 763.711796] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6d6f3266-6fbf-4d44-ac0d-f49ea3725558 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.717599] env[68638]: DEBUG nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 763.717850] env[68638]: DEBUG nova.compute.provider_tree [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 75 to 76 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 763.718085] env[68638]: DEBUG nova.compute.provider_tree [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 763.722967] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 763.722967] env[68638]: value = "task-2833515" [ 763.722967] env[68638]: _type = "Task" [ 763.722967] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.732739] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833515, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.738020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cc171a15-2bff-4e74-a364-e851fba462d8 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.726s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.800145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.800327] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquired lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.800460] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.879788] env[68638]: DEBUG nova.network.neutron [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.931168] env[68638]: DEBUG nova.compute.manager [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Received event network-vif-plugged-83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 763.931406] env[68638]: DEBUG oslo_concurrency.lockutils [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] Acquiring lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.931584] env[68638]: DEBUG oslo_concurrency.lockutils [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.931771] env[68638]: DEBUG oslo_concurrency.lockutils [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.931927] env[68638]: DEBUG nova.compute.manager [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] No waiting events found dispatching network-vif-plugged-83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 763.932113] env[68638]: WARNING nova.compute.manager [req-f9d6705b-5c3c-434f-90a9-7b774eaf74c4 req-02fd96e8-bd56-4b47-af7a-3aa5caab6921 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Received unexpected event network-vif-plugged-83007aec-935b-4f0b-9797-0a3e4b7435e7 for instance with vm_state building and task_state spawning. [ 764.050893] env[68638]: DEBUG nova.network.neutron [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.225135] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.979s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.227689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.837s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.227929] env[68638]: DEBUG nova.objects.instance [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lazy-loading 'resources' on Instance uuid 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.239172] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833515, 'name': Destroy_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.240726] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.250141] env[68638]: INFO nova.scheduler.client.report [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted allocations for instance 5a28d684-584b-4e13-9910-183119ce5d37 [ 764.340897] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.382254] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.382802] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.383087] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.383213] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.383392] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.383539] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 764.383782] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.383894] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.384077] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.384251] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.384414] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.384587] env[68638]: DEBUG nova.virt.hardware [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.384968] env[68638]: INFO nova.compute.manager [-] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Took 1.61 seconds to deallocate network for instance. 
[ 764.385787] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84d7eab-3606-4921-b722-52ae9b9c0018 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.397391] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970e0d9f-19f2-484a-a1c0-e945940bca45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.519223] env[68638]: DEBUG nova.network.neutron [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating instance_info_cache with network_info: [{"id": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "address": "fa:16:3e:dc:51:32", "network": {"id": "d3055f46-c454-41cb-b05b-5a7300fb8ab4", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-516232156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023c6f361e7c486a9a75b69ea8cae208", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83007aec-93", "ovs_interfaceid": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.532232] env[68638]: DEBUG nova.compute.manager [req-5dee6031-3264-48e4-964b-59cb1a0b74e8 req-10daba0f-53cf-4bd3-8a24-0a3af33052ab service nova] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Received event network-vif-deleted-e84b052e-af7e-4eb2-8c0c-08e6c66a2707 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 764.532232] env[68638]: DEBUG nova.compute.manager [req-5dee6031-3264-48e4-964b-59cb1a0b74e8 req-10daba0f-53cf-4bd3-8a24-0a3af33052ab service nova] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Received event network-vif-deleted-8407d492-d594-4996-8547-bfe5c27586e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 764.554728] env[68638]: INFO nova.compute.manager [-] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Took 1.31 seconds to deallocate network for instance. 
[ 764.565169] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.565511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.569175] env[68638]: INFO nova.compute.manager [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Attaching volume e57502e7-aa0f-4e7b-90cd-6099cf70f48c to /dev/sdb [ 764.603534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53bebaa-f053-4414-b0c6-ae93f603df5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.612285] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb082ff-004d-48a2-bb46-0f042bc23b5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.629394] env[68638]: DEBUG nova.virt.block_device [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updating existing volume attachment record: 2b02985c-1a42-4006-8faf-2ad9d6c012d2 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 764.738932] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833515, 'name': Destroy_Task, 'duration_secs': 0.5309} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.739210] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Destroyed the VM [ 764.739522] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 764.739678] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-731e9db9-80eb-4795-b1ae-ac7994c62b50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.748163] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 764.748163] env[68638]: value = "task-2833516" [ 764.748163] env[68638]: _type = "Task" [ 764.748163] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.760584] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833516, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.763554] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8e81390c-9bae-43fb-8dec-24b834986161 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "5a28d684-584b-4e13-9910-183119ce5d37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.530s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.768023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.894338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.023397] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Releasing lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.023631] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Instance network_info: |[{"id": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "address": "fa:16:3e:dc:51:32", "network": {"id": "d3055f46-c454-41cb-b05b-5a7300fb8ab4", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-516232156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023c6f361e7c486a9a75b69ea8cae208", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83007aec-93", "ovs_interfaceid": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 765.026858] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:51:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbe1725d-6711-4e92-9a4e-d4802651e7d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83007aec-935b-4f0b-9797-0a3e4b7435e7', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.035988] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Creating folder: Project (023c6f361e7c486a9a75b69ea8cae208). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 765.036301] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a0fa17d-9bdc-4f4e-b552-e61a80143dc8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.053333] env[68638]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 765.054052] env[68638]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68638) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 765.054052] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Folder already exists: Project (023c6f361e7c486a9a75b69ea8cae208). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 765.054252] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Creating folder: Instances. Parent ref: group-v569813. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 765.054530] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f20a651-9a4d-4dd1-9afd-cdd0dcd2f1bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.060589] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.067780] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Created folder: Instances in parent group-v569813. [ 765.068050] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.068663] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.068663] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68001d5e-8ba0-4d0d-90c3-f2e42adc918e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.095302] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.095302] env[68638]: value = "task-2833522" [ 765.095302] env[68638]: _type = "Task" [ 765.095302] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.109556] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833522, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.268334] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833516, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.319913] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8ba022-1039-482f-acc0-231a16f0597b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.328417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb21aea6-f798-4beb-a271-3ad21a54da7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.364637] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a58dd0-1ff5-4c33-8a76-faf5fcd76820 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.374720] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17e4684-ba39-49ea-a48f-653b0e662126 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.390397] env[68638]: DEBUG nova.compute.provider_tree [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.606357] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833522, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.761372] env[68638]: DEBUG oslo_vmware.api [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833516, 'name': RemoveSnapshot_Task, 'duration_secs': 0.780945} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.761706] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 765.894064] env[68638]: DEBUG nova.scheduler.client.report [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.956107] env[68638]: DEBUG nova.compute.manager [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Received event network-changed-83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 765.956324] env[68638]: DEBUG nova.compute.manager [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Refreshing instance network info cache due to event network-changed-83007aec-935b-4f0b-9797-0a3e4b7435e7. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 765.956541] env[68638]: DEBUG oslo_concurrency.lockutils [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] Acquiring lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.956684] env[68638]: DEBUG oslo_concurrency.lockutils [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] Acquired lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.957094] env[68638]: DEBUG nova.network.neutron [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Refreshing network info cache for port 83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.042768] env[68638]: DEBUG nova.compute.manager [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 766.106698] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833522, 'name': CreateVM_Task, 'duration_secs': 0.715593} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.106875] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.107564] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': '04c7372b-a8d0-4b24-882d-8396a76793ec', 'device_type': None, 'disk_bus': None, 'delete_on_termination': True, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569817', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'name': 'volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '039edcf8-7908-4be4-8bd3-0b55545b6f7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'serial': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0'}, 'volume_type': None}], 'swap': None} {{(pid=68638) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 766.107770] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Root volume attach. Driver type: vmdk {{(pid=68638) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 766.108552] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c912aed-c255-4958-8491-fad866e7d613 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.116651] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593ce6b9-e583-407e-81c5-787a471484db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.124590] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2576a3d-61f5-4e95-b5a6-438508957c7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.130486] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-dae7f289-9feb-4c38-a8f9-79028463c9df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.138113] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 766.138113] env[68638]: value = "task-2833523" [ 766.138113] env[68638]: _type = "Task" [ 766.138113] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.146232] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.267553] env[68638]: WARNING nova.compute.manager [None req-5d47e23e-d232-458a-969c-cbd9df80d9fb tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Image not found during snapshot: nova.exception.ImageNotFound: Image 5c06a7dc-0846-43cb-a61a-1bdfd115a5c8 could not be found. [ 766.399047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.401751] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.094s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.401829] env[68638]: DEBUG nova.objects.instance [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 766.430028] env[68638]: INFO nova.scheduler.client.report [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleted allocations for instance 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4 [ 766.560652] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.650272] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 42%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.715349] env[68638]: DEBUG nova.network.neutron [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updated VIF entry in instance network info cache for port 83007aec-935b-4f0b-9797-0a3e4b7435e7. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.715829] env[68638]: DEBUG nova.network.neutron [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating instance_info_cache with network_info: [{"id": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "address": "fa:16:3e:dc:51:32", "network": {"id": "d3055f46-c454-41cb-b05b-5a7300fb8ab4", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-516232156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023c6f361e7c486a9a75b69ea8cae208", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83007aec-93", "ovs_interfaceid": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.938485] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9c397a15-e7a0-4b81-b9ed-ac46d76ce0d3 tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "4b5c5b9e-389d-4ed9-a860-bd41a33fbac4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.219s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.150270] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 56%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.219816] env[68638]: DEBUG oslo_concurrency.lockutils [req-ec9da894-0c87-4400-82a4-cd6841b7329a req-d37d6e8c-d0c2-4916-a68c-bd1dab687877 service nova] Releasing lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.321349] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "2450602a-fde7-4a65-b7a2-be4195077758" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.321696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.321948] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "2450602a-fde7-4a65-b7a2-be4195077758-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.322209] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.322422] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.324690] env[68638]: INFO nova.compute.manager [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Terminating instance [ 767.417481] env[68638]: DEBUG oslo_concurrency.lockutils [None req-99f63158-6fdc-48e9-9bc8-cffd8d63ae71 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
767.420528] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.869s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.421902] env[68638]: INFO nova.compute.claims [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.650323] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 69%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.829275] env[68638]: DEBUG nova.compute.manager [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 767.829399] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.830472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82467535-d476-4b76-9725-e961eaefb160 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.841236] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 767.841536] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81d9e0ad-8822-4348-8623-f3d90758fac0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.851526] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 767.851526] env[68638]: value = "task-2833525" [ 767.851526] env[68638]: _type = "Task" [ 767.851526] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.861799] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833525, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.152451] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 82%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.362022] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833525, 'name': PowerOffVM_Task, 'duration_secs': 0.307632} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.362393] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.363036] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.363036] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb94a63c-94d9-4f2c-9ca5-b95e4dc0ac82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.458865] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 768.459210] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 768.459311] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleting the datastore file [datastore1] 2450602a-fde7-4a65-b7a2-be4195077758 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.462552] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a709bbb-c6a9-41b4-9925-c82b370321a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.471165] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] 
Waiting for the task: (returnval){ [ 768.471165] env[68638]: value = "task-2833527" [ 768.471165] env[68638]: _type = "Task" [ 768.471165] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.484065] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.652447] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 97%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.982099] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833527, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.994943] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df9d9db0-9e5d-4d51-ab8c-d653de6d8e2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.003476] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34da9e0-ed6f-4e8d-898f-0636d08a5ffc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.037471] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.037725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.041042] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55194374-5525-453a-a4a7-acff85007daa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.051124] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6999af-483d-4b44-b548-b0e1feca3414 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.070472] env[68638]: DEBUG nova.compute.provider_tree [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef 
tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.152129] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 98%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.178356] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 769.178627] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569878', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'name': 'volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b176c5d-e77c-410b-b282-b7bba65359a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'serial': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 769.179573] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc092d4b-dae0-4d90-ba16-c33968ddd80b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.196169] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba17df1-5cd3-42b0-b813-a706ba0bac9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.220738] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c/volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.221048] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca34f826-8461-4215-8a9e-9750475adc19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.240104] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 769.240104] env[68638]: value = "task-2833528" [ 769.240104] 
env[68638]: _type = "Task" [ 769.240104] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.248148] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833528, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.482086] env[68638]: DEBUG oslo_vmware.api [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.637359} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.482370] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 769.482554] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 769.482726] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 769.482899] env[68638]: INFO nova.compute.manager [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Took 1.65 seconds to destroy the instance on the hypervisor. [ 769.483252] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.483454] env[68638]: DEBUG nova.compute.manager [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 769.483454] env[68638]: DEBUG nova.network.neutron [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 769.545684] env[68638]: DEBUG nova.compute.utils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.574749] env[68638]: DEBUG nova.scheduler.client.report [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.653385] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task} progress is 98%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.753405] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833528, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.049361] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.072551] env[68638]: DEBUG nova.compute.manager [req-13e66838-d6b0-4cdf-9eed-8fc5d533e0e4 req-38de350d-0fcd-4e19-874d-5ee34f97faef service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Received event network-vif-deleted-53351f41-4f72-4547-8bc4-8949546128c2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 770.072551] env[68638]: INFO nova.compute.manager [req-13e66838-d6b0-4cdf-9eed-8fc5d533e0e4 req-38de350d-0fcd-4e19-874d-5ee34f97faef service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Neutron deleted interface 53351f41-4f72-4547-8bc4-8949546128c2; detaching it from the instance and deleting it from the info cache [ 770.072551] env[68638]: DEBUG nova.network.neutron [req-13e66838-d6b0-4cdf-9eed-8fc5d533e0e4 req-38de350d-0fcd-4e19-874d-5ee34f97faef service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.078269] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.078916] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.288s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.079155] env[68638]: DEBUG nova.objects.instance [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'resources' on Instance uuid 24982641-40ec-4fab-8385-1bc9dea6ade1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 770.153510] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833523, 'name': RelocateVM_Task, 'duration_secs': 3.967531} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.153838] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 770.154076] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569817', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'name': 'volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '039edcf8-7908-4be4-8bd3-0b55545b6f7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'serial': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 770.154870] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7899b8ef-9ea3-491e-a763-4bb1dbae3691 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.172803] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f31c07-8e22-46a5-9b36-7b9401892072 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.196078] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0/volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.196472] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad1fbfce-bca9-48da-86a5-ba4e990804f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.218087] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 770.218087] env[68638]: value = "task-2833529" [ 770.218087] env[68638]: _type = "Task" [ 770.218087] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.227280] env[68638]: DEBUG nova.network.neutron [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.228556] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833529, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.250842] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833528, 'name': ReconfigVM_Task, 'duration_secs': 0.581594} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.251119] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfigured VM instance instance-00000024 to attach disk [datastore2] volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c/volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.256021] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42348093-005c-4a55-89f6-51da9804ce04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.273236] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 770.273236] env[68638]: value = "task-2833530" [ 770.273236] env[68638]: _type = "Task" [ 770.273236] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.282142] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.575029] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b536bde6-9d8c-4c9f-a98c-d9a3b5340cfa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.586067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "443de75e-cff2-4998-b494-b465d6641d89" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.588138] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "443de75e-cff2-4998-b494-b465d6641d89" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.002s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.592489] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8bf65d-6892-451f-bbd8-d96bd84f4ca7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.630407] env[68638]: DEBUG nova.compute.manager [req-13e66838-d6b0-4cdf-9eed-8fc5d533e0e4 req-38de350d-0fcd-4e19-874d-5ee34f97faef service nova] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Detach interface failed, port_id=53351f41-4f72-4547-8bc4-8949546128c2, reason: Instance 2450602a-fde7-4a65-b7a2-be4195077758 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 770.731240] env[68638]: INFO nova.compute.manager [-] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Took 1.25 seconds to deallocate network for instance. [ 770.731583] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833529, 'name': ReconfigVM_Task, 'duration_secs': 0.325551} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.735804] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Reconfigured VM instance instance-00000030 to attach disk [datastore2] volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0/volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.743704] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc7805e8-896f-48e8-a9a1-1576eb76c950 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.762272] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 770.762272] env[68638]: value = "task-2833531" [ 770.762272] env[68638]: _type = "Task" [ 770.762272] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.773935] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833531, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.784577] env[68638]: DEBUG oslo_vmware.api [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833530, 'name': ReconfigVM_Task, 'duration_secs': 0.149286} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.784904] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569878', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'name': 'volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b176c5d-e77c-410b-b282-b7bba65359a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'serial': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 770.846184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.846445] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.846651] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.846829] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.846998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.849881] env[68638]: INFO nova.compute.manager [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] 
Terminating instance [ 771.090877] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.091170] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.091416] env[68638]: INFO nova.compute.manager [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Attaching volume 2b7daa37-8901-45a9-b233-21a7aa53a70a to /dev/sdb [ 771.103712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "443de75e-cff2-4998-b494-b465d6641d89" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.516s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.104326] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.112254] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86f921f-d989-4869-a655-e55e0c890d43 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.121141] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e5d148-57ec-4937-bb9e-543743485b63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.157095] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e27095-95a0-4edd-8cd9-5f11c9dea32e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.160151] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc65d840-c90c-418a-96a3-b23e0cf99b06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.171221] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d536506d-4e4c-4428-92b9-735a804ae9c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.175726] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cfb8ac7-dee7-4fca-b018-c1cda1db95de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.191321] env[68638]: DEBUG nova.virt.block_device [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updating existing volume attachment record: 1011909b-d63f-4a17-b85d-80761ca12d3c {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 771.202028] env[68638]: DEBUG nova.compute.provider_tree [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.254990] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.271821] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833531, 'name': ReconfigVM_Task, 'duration_secs': 0.126675} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.272146] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569817', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'name': 'volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '039edcf8-7908-4be4-8bd3-0b55545b6f7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'serial': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 771.272675] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36e88037-76a0-458f-966b-15ebb6c62640 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.279637] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 771.279637] env[68638]: value = "task-2833532" [ 771.279637] env[68638]: _type = "Task" [ 771.279637] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.289885] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833532, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.353309] env[68638]: DEBUG nova.compute.manager [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 771.353393] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.354340] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9de5337-a9e0-4b2f-810d-e06ea7d12f91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.372420] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.372420] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d56125d0-a640-4d95-b0f5-554e7383a2e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.380114] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 771.380114] env[68638]: value = "task-2833534" [ 771.380114] env[68638]: _type = "Task" [ 771.380114] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.391040] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.610208] env[68638]: DEBUG nova.compute.utils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.613035] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.613431] env[68638]: DEBUG nova.network.neutron [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.671791] env[68638]: DEBUG nova.policy [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13e77db5ee304787b700dada68320c3a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8ea76bfe01094fc98e951d13b1f9876b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 771.705564] env[68638]: DEBUG nova.scheduler.client.report [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.756270] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.756551] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.756921] env[68638]: DEBUG nova.objects.instance [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'flavor' on Instance uuid 14772ba8-bde2-42ef-9a37-df876c8af321 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.791697] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833532, 'name': Rename_Task, 'duration_secs': 0.160237} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.792031] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.792249] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8225ad6-c83a-4b22-9ae1-5e917fc66a21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.800204] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 771.800204] env[68638]: value = "task-2833537" [ 771.800204] env[68638]: _type = "Task" [ 771.800204] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.809697] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833537, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.821884] env[68638]: DEBUG nova.objects.instance [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid 1b176c5d-e77c-410b-b282-b7bba65359a9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.896480] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833534, 'name': PowerOffVM_Task, 'duration_secs': 0.223018} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.897038] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.897388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 771.897830] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1ae6d6a-54f0-4fb7-a584-425b6875e05a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.978171] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.978419] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.978611] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Deleting the datastore file [datastore1] 1eee31b7-db8b-4765-8cc2-4273717ef86e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.978902] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23a9fcb9-7f44-4020-8e91-df95cca9f763 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.986600] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for the task: (returnval){ [ 771.986600] env[68638]: value = "task-2833539" [ 771.986600] env[68638]: _type = "Task" [ 771.986600] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.997791] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833539, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.027320] env[68638]: DEBUG nova.network.neutron [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Successfully created port: c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.113868] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.211965] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.133s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.218302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.399s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.220514] env[68638]: INFO nova.compute.claims [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.251949] env[68638]: INFO nova.scheduler.client.report [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocations for instance 24982641-40ec-4fab-8385-1bc9dea6ade1 [ 772.316845] env[68638]: DEBUG oslo_vmware.api [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833537, 'name': PowerOnVM_Task, 'duration_secs': 0.504012} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.316845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.316945] env[68638]: INFO nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Took 7.93 seconds to spawn the instance on the hypervisor. 
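The RelocateVM_Task, ReconfigVM_Task and PowerOnVM_Task entries above (wait_for_task at oslo_vmware/api.py:397 and the repeated "progress is N%" polls at api.py:434) follow oslo.vmware's usual invoke-then-wait pattern. The sketch below is only an illustration of that pattern: the vCenter host, credentials, session options and managed-object ID are placeholders, not values from this deployment, and nova's own driver code wires this up differently.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder connection details -- not taken from this log.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)  # polling cadence behind the "progress is N%" DEBUG lines

# Build a managed-object reference for an assumed VM and start a vCenter task.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls the task until it reports success (or raises on error),
# emitting the per-poll progress entries seen throughout this log.
session.wait_for_task(task)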
[ 772.317186] env[68638]: DEBUG nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.319263] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32641a1-7695-45f6-b1bd-056b7c6253f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.332275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe26bc22-b24c-469e-9a35-ba4ff7c45bbb tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.767s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.399515] env[68638]: DEBUG nova.objects.instance [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'pci_requests' on Instance uuid 14772ba8-bde2-42ef-9a37-df876c8af321 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.497988] env[68638]: DEBUG oslo_vmware.api [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Task: {'id': task-2833539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170063} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.498645] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.498645] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.498645] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.498904] env[68638]: INFO nova.compute.manager [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Took 1.15 seconds to destroy the instance on the hypervisor. 
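The "Acquiring lock ... by ...", "acquired ... waited N.NNNs" and '"released" ... held N.NNNs' entries above are emitted by oslo.concurrency's lockutils wrapper (lockutils.py:405/410/424). A rough sketch of that usage pattern follows; the lock names and the bodies of the critical sections are illustrative only and are not nova's actual code.

import time

from oslo_concurrency import lockutils

# Calls sharing the same lock name are serialized; the wrapper logs the
# Acquiring/acquired/released DEBUG lines with the waited/held durations.
@lockutils.synchronized('example-instance-uuid')
def do_attach_volume():
    time.sleep(0.1)  # stand-in for the real volume-attach work

# Ad-hoc critical sections can use the context-manager form instead.
with lockutils.lock('compute_resources'):
    pass  # resource-tracker style bookkeeping would happen here

do_attach_volume()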
[ 772.499030] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.499257] env[68638]: DEBUG nova.compute.manager [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 772.499357] env[68638]: DEBUG nova.network.neutron [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.761236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e89af053-4428-4191-bf39-fbafbe61d39b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "24982641-40ec-4fab-8385-1bc9dea6ade1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.107s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.841352] env[68638]: INFO nova.compute.manager [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Took 53.19 seconds to build instance. [ 772.902292] env[68638]: DEBUG nova.objects.base [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Object Instance<14772ba8-bde2-42ef-9a37-df876c8af321> lazy-loaded attributes: flavor,pci_requests {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 772.902528] env[68638]: DEBUG nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.948529] env[68638]: DEBUG nova.policy [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 773.124341] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.147136] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.147412] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.147596] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.147784] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.147931] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.148206] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.148443] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.148607] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.148775] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef 
tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.148936] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.149124] env[68638]: DEBUG nova.virt.hardware [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.149998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e69471-1761-4a95-8692-a51c92ff551c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.158893] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a20dd24-aa17-401b-9c64-e73e21810469 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.218908] env[68638]: DEBUG nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Successfully created port: dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.343072] env[68638]: DEBUG oslo_concurrency.lockutils [None req-089e02ce-571b-401a-8f09-020cbc5d97f4 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.383s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.548171] env[68638]: DEBUG nova.network.neutron [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.817759] env[68638]: DEBUG nova.network.neutron [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Successfully updated port: c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.844548] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283685c7-40f5-443b-9247-4de4de428830 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.847836] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.856379] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa04faa-ed4f-4a74-99c2-5243cc9d22e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.894276] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf80b7b2-d20f-4cda-b7f5-6ed1c31e76a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.903486] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847fcca1-fbde-4c7a-a84d-cf831e0715f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.921239] env[68638]: DEBUG nova.compute.provider_tree [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.980694] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.980929] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.051383] env[68638]: INFO nova.compute.manager [-] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Took 1.55 seconds to deallocate network for instance. 
[ 774.322258] env[68638]: DEBUG nova.compute.manager [req-18caef4c-43f7-4e66-84c7-bb87ced7fbcf req-71ff5215-0acf-4201-ae14-dacd05ec7394 service nova] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Received event network-vif-deleted-e76019da-d59a-45b4-a8e3-6fcded54f7b8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 774.323530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.323530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquired lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.323530] env[68638]: DEBUG nova.network.neutron [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.367800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.424181] env[68638]: DEBUG nova.scheduler.client.report [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.558065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.732168] env[68638]: DEBUG nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Successfully updated port: dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.860208] env[68638]: DEBUG nova.network.neutron [None 
req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.932267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.932267] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.935681] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.290s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.937593] env[68638]: INFO nova.compute.claims [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.033108] env[68638]: DEBUG nova.network.neutron [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Updating instance_info_cache with network_info: [{"id": "c813e7f7-aecf-49ab-a677-4b2109dce440", "address": "fa:16:3e:dd:f6:f4", "network": {"id": "b4599e42-4e62-4120-aef1-de4365371f9f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1543432383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ea76bfe01094fc98e951d13b1f9876b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc813e7f7-ae", "ovs_interfaceid": "c813e7f7-aecf-49ab-a677-4b2109dce440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.235725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 
tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.235725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.235885] env[68638]: DEBUG nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.395381] env[68638]: DEBUG nova.compute.manager [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-vif-plugged-dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 775.395663] env[68638]: DEBUG oslo_concurrency.lockutils [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.395808] env[68638]: DEBUG oslo_concurrency.lockutils [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.395959] env[68638]: DEBUG oslo_concurrency.lockutils [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.396200] env[68638]: DEBUG nova.compute.manager [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] No waiting events found dispatching network-vif-plugged-dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 775.396329] env[68638]: WARNING nova.compute.manager [req-bc858447-6b8c-47d6-b217-d20c1f57ecae req-2bfbfeeb-0f92-4eb9-9ab5-09658d917c5b service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received unexpected event network-vif-plugged-dda84b25-2545-4c4c-a7a2-9ed304b5db43 for instance with vm_state active and task_state None. 
[ 775.448172] env[68638]: DEBUG nova.compute.utils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.449685] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.450542] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.515423] env[68638]: DEBUG nova.policy [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e66d8cfbe6c41bc90baaf1e7eb23a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ded98d5a15c54e01b752c52b88549b3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.534468] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Releasing lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.535057] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Instance network_info: |[{"id": "c813e7f7-aecf-49ab-a677-4b2109dce440", "address": "fa:16:3e:dd:f6:f4", "network": {"id": "b4599e42-4e62-4120-aef1-de4365371f9f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1543432383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ea76bfe01094fc98e951d13b1f9876b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc813e7f7-ae", "ovs_interfaceid": "c813e7f7-aecf-49ab-a677-4b2109dce440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 775.535489] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:f6:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c813e7f7-aecf-49ab-a677-4b2109dce440', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.543466] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Creating folder: Project (8ea76bfe01094fc98e951d13b1f9876b). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.543811] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2336c85-e9e3-4b80-b495-eb7912460a39 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.560828] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Created folder: Project (8ea76bfe01094fc98e951d13b1f9876b) in parent group-v569734. [ 775.561053] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Creating folder: Instances. Parent ref: group-v569883. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.561322] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5f316ea-34e1-4ab7-b5b3-71ec17123447 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.578358] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Created folder: Instances in parent group-v569883. [ 775.578508] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.578736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.578958] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a9f0256-cfd5-4d63-af31-00ae1c685c02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.600440] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.600440] env[68638]: value = "task-2833543" [ 775.600440] env[68638]: _type = "Task" [ 775.600440] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.618842] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833543, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.757136] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 775.757477] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569882', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'name': 'volume-2b7daa37-8901-45a9-b233-21a7aa53a70a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c80895d5-1a59-4779-9da9-9aeec10bc395', 'attached_at': '', 'detached_at': '', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'serial': '2b7daa37-8901-45a9-b233-21a7aa53a70a'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 775.758401] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d68aa08e-a913-41f1-a6eb-1a1687957431 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.797627] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4835b4-d77d-4394-b9e1-c40134491686 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.811749] env[68638]: WARNING nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] cd5da8a3-b68c-498e-8922-d556cd2178c4 already exists in list: networks containing: ['cd5da8a3-b68c-498e-8922-d556cd2178c4']. 
ignoring it [ 775.858854] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] volume-2b7daa37-8901-45a9-b233-21a7aa53a70a/volume-2b7daa37-8901-45a9-b233-21a7aa53a70a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.860182] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Successfully created port: ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.862921] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-028384fa-a4b7-414f-9843-359eec5b6bea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.898277] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 775.898277] env[68638]: value = "task-2833544" [ 775.898277] env[68638]: _type = "Task" [ 775.898277] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.911198] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.954521] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 776.112976] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833543, 'name': CreateVM_Task, 'duration_secs': 0.496881} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.113312] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.114218] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.114430] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.114924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.115295] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8448b9-09c4-4a31-97bd-33bafd79ea91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.123944] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 776.123944] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac5908-2942-d538-2887-8986c685664c" [ 776.123944] env[68638]: _type = "Task" [ 776.123944] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.139380] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ac5908-2942-d538-2887-8986c685664c, 'name': SearchDatastore_Task, 'duration_secs': 0.011358} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.139712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.139901] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.140149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.140296] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.140474] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.140729] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8b9f12f-95d8-4293-9195-f9c655881181 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.150482] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.150732] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.151962] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65d18f46-4116-49ce-b276-f8ca1c5c32b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.157471] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 776.157471] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52486b07-46e6-a475-9dee-a6385a754bff" [ 776.157471] env[68638]: _type = "Task" [ 776.157471] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.167779] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52486b07-46e6-a475-9dee-a6385a754bff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.408819] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.516788] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfbbba1-8336-47b3-89d5-96e30cb6abd8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.525179] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6161e440-9065-4f9b-a944-896f36e533d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.558282] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6117bed3-4bb6-4e8e-868b-ae4793bd18fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.566900] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc60e18f-cce4-486d-860a-f0a63a0de2ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.580729] env[68638]: DEBUG nova.compute.provider_tree [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.618287] env[68638]: DEBUG nova.network.neutron [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": 
{"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dda84b25-2545-4c4c-a7a2-9ed304b5db43", "address": "fa:16:3e:10:71:23", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda84b25-25", "ovs_interfaceid": "dda84b25-2545-4c4c-a7a2-9ed304b5db43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.668345] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52486b07-46e6-a475-9dee-a6385a754bff, 'name': SearchDatastore_Task, 'duration_secs': 0.019647} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.669154] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b40abed4-ed57-41a7-b2d2-d19ac249c0f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.676828] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 776.676828] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5288f47e-33b4-768f-1f33-4368b26e8b68" [ 776.676828] env[68638]: _type = "Task" [ 776.676828] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.685514] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5288f47e-33b4-768f-1f33-4368b26e8b68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.909630] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833544, 'name': ReconfigVM_Task, 'duration_secs': 0.619544} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.909926] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfigured VM instance instance-00000007 to attach disk [datastore2] volume-2b7daa37-8901-45a9-b233-21a7aa53a70a/volume-2b7daa37-8901-45a9-b233-21a7aa53a70a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 776.914859] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec607bce-c315-454e-aea9-72ebd30d49d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.931187] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 776.931187] env[68638]: value = "task-2833545" [ 776.931187] env[68638]: _type = "Task" [ 776.931187] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.940244] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833545, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.971032] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.999620] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.999875] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.000051] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.000247] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.000392] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.000537] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.000749] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.000907] 
env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.001087] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.001256] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.001428] env[68638]: DEBUG nova.virt.hardware [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.002364] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e4de1a-0fbb-4bad-8562-e035813bad35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.011603] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e6d988-4e0d-4212-872e-219e7b087d10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.083839] env[68638]: DEBUG nova.scheduler.client.report [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.121107] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.121885] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.122078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 
tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.122920] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b43c924-0525-4247-a696-900fb3e64a1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.140719] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 777.140954] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.141130] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 777.141318] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 777.141466] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 777.141612] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 777.141817] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 777.141971] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 
tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 777.142161] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 777.142331] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 777.142504] env[68638]: DEBUG nova.virt.hardware [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 777.149801] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfiguring VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 777.150140] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4dddf0d-3542-4083-b1ca-76c21ffe8941 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.169704] env[68638]: DEBUG oslo_vmware.api [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 777.169704] env[68638]: value = "task-2833546" [ 777.169704] env[68638]: _type = "Task" [ 777.169704] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.179755] env[68638]: DEBUG oslo_vmware.api [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833546, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.189267] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5288f47e-33b4-768f-1f33-4368b26e8b68, 'name': SearchDatastore_Task, 'duration_secs': 0.012637} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.189267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.189406] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] da306fdd-a5b4-4275-a482-f77cc008d780/da306fdd-a5b4-4275-a482-f77cc008d780.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.189552] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1d9c20ec-94a8-4f1c-9822-825d7a57afb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.197576] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 777.197576] env[68638]: value = "task-2833547" [ 777.197576] env[68638]: _type = "Task" [ 777.197576] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.206858] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833547, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.381288] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Successfully updated port: ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.443418] env[68638]: DEBUG oslo_vmware.api [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833545, 'name': ReconfigVM_Task, 'duration_secs': 0.176058} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.443849] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569882', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'name': 'volume-2b7daa37-8901-45a9-b233-21a7aa53a70a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c80895d5-1a59-4779-9da9-9aeec10bc395', 'attached_at': '', 'detached_at': '', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'serial': '2b7daa37-8901-45a9-b233-21a7aa53a70a'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 777.472347] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Received event network-vif-plugged-c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 777.472648] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Acquiring lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.472899] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.473314] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.473539] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] No waiting events found dispatching network-vif-plugged-c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 777.473767] env[68638]: WARNING nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Received unexpected event network-vif-plugged-c813e7f7-aecf-49ab-a677-4b2109dce440 for instance with vm_state building and task_state spawning. 
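[Editor's note] The ReconfigVM_Task / CopyVirtualDisk_Task entries above follow oslo.vmware's submit-then-poll pattern: the driver invokes an asynchronous vSphere task, then wait_for_task polls it (the "Waiting for the task ... to complete" and "progress is N%" records) until vCenter reports success or error. A minimal sketch of that flow, assuming oslo.vmware is installed; the vCenter host, credentials and managed-object references below are placeholders, not values taken from this log:

    # Sketch of the submit-then-poll flow behind the task records above
    # (not the Nova driver's code). Host/credentials/refs are placeholders.
    from oslo_vmware import api

    def reconfigure_vm(session, vm_ref, config_spec):
        # Submit the asynchronous vSphere task (ReconfigVM_Task here).
        task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                      vm_ref, spec=config_spec)
        # Block until vCenter finishes the task; oslo.vmware polls it,
        # logging progress, and raises if the task ends in an error state.
        return session.wait_for_task(task_ref)

    # Example wiring (placeholder values):
    # session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)
    # reconfigure_vm(session, vm_ref, config_spec)
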
[ 777.474044] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Received event network-changed-c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 777.474284] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Refreshing instance network info cache due to event network-changed-c813e7f7-aecf-49ab-a677-4b2109dce440. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 777.474534] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Acquiring lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.474724] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Acquired lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.474938] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Refreshing network info cache for port c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.589092] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.589701] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 777.592678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.218s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.592896] env[68638]: DEBUG nova.objects.instance [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 777.682031] env[68638]: DEBUG oslo_vmware.api [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833546, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.708295] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499881} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.708599] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] da306fdd-a5b4-4275-a482-f77cc008d780/da306fdd-a5b4-4275-a482-f77cc008d780.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.708830] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.709113] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-211d0926-41b5-4a26-9ca8-c175b8af4c6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.716612] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 777.716612] env[68638]: value = "task-2833548" [ 777.716612] env[68638]: _type = "Task" [ 777.716612] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.726302] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.886042] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.886042] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.886042] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 778.098446] env[68638]: DEBUG nova.compute.utils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.102668] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.103051] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.154932] env[68638]: DEBUG nova.policy [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35503102d9274ae1b18e12a931d5efa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa16293a678b4a35ac0837f6ce904e48', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 778.182046] env[68638]: DEBUG oslo_vmware.api [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833546, 'name': ReconfigVM_Task, 'duration_secs': 0.8885} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.182046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.182262] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfigured VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 778.194817] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Updated VIF entry in instance network info cache for port c813e7f7-aecf-49ab-a677-4b2109dce440. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 778.195537] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Updating instance_info_cache with network_info: [{"id": "c813e7f7-aecf-49ab-a677-4b2109dce440", "address": "fa:16:3e:dd:f6:f4", "network": {"id": "b4599e42-4e62-4120-aef1-de4365371f9f", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1543432383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8ea76bfe01094fc98e951d13b1f9876b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc813e7f7-ae", "ovs_interfaceid": "c813e7f7-aecf-49ab-a677-4b2109dce440", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.228472] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.137031} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.228743] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.230900] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b903a08-f94e-4a1e-b88e-0502c9886b39 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.256017] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] da306fdd-a5b4-4275-a482-f77cc008d780/da306fdd-a5b4-4275-a482-f77cc008d780.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.256380] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26f8975e-dced-44da-8bf7-9e66a9b3448d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.278516] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 778.278516] env[68638]: value = "task-2833549" [ 778.278516] env[68638]: _type = "Task" [ 778.278516] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.288207] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.402180] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-changed-dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 778.402387] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing instance network info cache due to event network-changed-dda84b25-2545-4c4c-a7a2-9ed304b5db43. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 778.402543] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.402699] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.402833] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Refreshing network info cache for port dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.421965] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.484848] env[68638]: DEBUG nova.objects.instance [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lazy-loading 'flavor' on Instance uuid c80895d5-1a59-4779-9da9-9aeec10bc395 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.518013] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Successfully created port: d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.556892] env[68638]: DEBUG nova.network.neutron [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Updating instance_info_cache with network_info: [{"id": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "address": "fa:16:3e:37:05:1d", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab64b5b6-6a", "ovs_interfaceid": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.604226] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 778.610025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.610025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.610025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-00ae7500-bbc7-46c8-a548-d2d8ab77d428 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.610025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.555s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.610025] env[68638]: DEBUG nova.objects.instance [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lazy-loading 'resources' on Instance uuid ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 778.688452] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3965287-9d0d-4431-8337-ea5e9205f100 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.932s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.698158] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Releasing lock "refresh_cache-da306fdd-a5b4-4275-a482-f77cc008d780" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.698481] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Received event network-changed-83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 778.698705] env[68638]: DEBUG nova.compute.manager [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Refreshing instance network info cache due to event network-changed-83007aec-935b-4f0b-9797-0a3e4b7435e7. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 778.698957] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Acquiring lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.699147] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Acquired lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.699683] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Refreshing network info cache for port 83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.789444] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.989948] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fe35d9e-93d4-45f5-b487-d94523a74681 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.899s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.059869] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.060264] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Instance network_info: |[{"id": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "address": "fa:16:3e:37:05:1d", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab64b5b6-6a", "ovs_interfaceid": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 779.060675] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:05:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 779.069006] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 779.069538] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 779.069774] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-656eeae8-67d4-443c-9a9b-bad63f637ed0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.092449] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 779.092449] env[68638]: value = "task-2833550" [ 779.092449] env[68638]: _type = "Task" [ 779.092449] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.100789] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833550, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.118914] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updated VIF entry in instance network info cache for port dda84b25-2545-4c4c-a7a2-9ed304b5db43. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.119353] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "dda84b25-2545-4c4c-a7a2-9ed304b5db43", "address": "fa:16:3e:10:71:23", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdda84b25-25", "ovs_interfaceid": "dda84b25-2545-4c4c-a7a2-9ed304b5db43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.293218] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833549, 'name': ReconfigVM_Task, 'duration_secs': 0.939765} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.293536] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Reconfigured VM instance instance-00000031 to attach disk [datastore1] da306fdd-a5b4-4275-a482-f77cc008d780/da306fdd-a5b4-4275-a482-f77cc008d780.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.294256] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a19cfb71-d594-4744-b8f6-803aacd329c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.306189] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 779.306189] env[68638]: value = "task-2833551" [ 779.306189] env[68638]: _type = "Task" [ 779.306189] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.318584] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833551, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.506029] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updated VIF entry in instance network info cache for port 83007aec-935b-4f0b-9797-0a3e4b7435e7. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.506306] env[68638]: DEBUG nova.network.neutron [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating instance_info_cache with network_info: [{"id": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "address": "fa:16:3e:dc:51:32", "network": {"id": "d3055f46-c454-41cb-b05b-5a7300fb8ab4", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-516232156-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023c6f361e7c486a9a75b69ea8cae208", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbe1725d-6711-4e92-9a4e-d4802651e7d0", "external-id": "nsx-vlan-transportzone-679", "segmentation_id": 679, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83007aec-93", "ovs_interfaceid": "83007aec-935b-4f0b-9797-0a3e4b7435e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.606051] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833550, 'name': CreateVM_Task, 'duration_secs': 0.33294} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.606154] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 779.606822] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.606994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.607319] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 779.607688] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cae13e3-2d0b-466a-b70f-81a111ac666e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.614423] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 779.616282] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 779.616282] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b8a410-bbe0-5554-d01f-4bd101e7f42a" [ 779.616282] env[68638]: _type = "Task" [ 779.616282] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.622653] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.622919] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Received event network-vif-plugged-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.624295] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Acquiring lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.624295] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.624295] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.624295] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] No waiting events found dispatching network-vif-plugged-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.624295] env[68638]: WARNING nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Received unexpected event network-vif-plugged-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad for instance with vm_state building and task_state spawning. [ 779.624295] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Received event network-changed-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.624295] env[68638]: DEBUG nova.compute.manager [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Refreshing instance network info cache due to event network-changed-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 779.624534] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Acquiring lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.624959] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Acquired lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.624959] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Refreshing network info cache for port ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.632335] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b8a410-bbe0-5554-d01f-4bd101e7f42a, 'name': SearchDatastore_Task, 'duration_secs': 0.01146} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.632887] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.633152] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.633441] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.633606] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.633785] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.634603] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79e8c48e-499b-4b0c-927b-bb7387d412e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.647184] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 779.647184] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.647184] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 779.647184] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.647184] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 779.647498] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 779.647498] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 779.647738] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 
tempest-AttachVolumeTestJSON-1230075112-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 779.647880] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 779.647966] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 779.648221] env[68638]: DEBUG nova.virt.hardware [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 779.649500] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1197d6d2-a9a9-4e45-bafc-6fbee6cc0360 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.653399] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.653573] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.654630] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25735bf6-e1ad-493b-817d-e4d6dd9745b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.660590] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7bbc12-8cb5-4179-8a50-180651394a26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.667883] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 779.667883] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521ac165-1be7-2135-84ff-5cbed54e52f6" [ 779.667883] env[68638]: _type = "Task" [ 779.667883] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.687729] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521ac165-1be7-2135-84ff-5cbed54e52f6, 'name': SearchDatastore_Task, 'duration_secs': 0.011443} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.688561] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a765614f-3b64-4e65-be87-1cb53dc39014 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.694909] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 779.694909] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b03152-1d67-874c-5d84-0d36e442ab95" [ 779.694909] env[68638]: _type = "Task" [ 779.694909] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.703918] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b03152-1d67-874c-5d84-0d36e442ab95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.718828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0587d0d0-c375-44c3-bf42-4ae1159c1566 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.727128] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02bfd2c-20be-4e73-a21f-e01f058ca335 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.759191] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e15890-171f-415b-9619-16a423ba1fd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.767552] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee15cab-aaaa-4193-8c9e-059482cd2ab2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.782432] env[68638]: DEBUG nova.compute.provider_tree [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.817221] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833551, 'name': Rename_Task, 'duration_secs': 0.19592} 
completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.817568] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.817928] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71d3fa6d-4e7d-4a69-8d56-796c3f4b5b76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.825938] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 779.825938] env[68638]: value = "task-2833552" [ 779.825938] env[68638]: _type = "Task" [ 779.825938] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.838242] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.011136] env[68638]: DEBUG oslo_concurrency.lockutils [req-93dae968-ceff-4c5d-be6f-716c45f0a690 req-06fd0507-7be8-4d3d-a751-9ee762ee998f service nova] Releasing lock "refresh_cache-039edcf8-7908-4be4-8bd3-0b55545b6f7b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.090489] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Successfully updated port: d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.206733] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b03152-1d67-874c-5d84-0d36e442ab95, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.207051] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.207351] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] bb86aabd-129d-4c14-9db1-6676a5e7b9fa/bb86aabd-129d-4c14-9db1-6676a5e7b9fa.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 780.208124] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3708873-b497-4a8a-9268-793cf367b528 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.212193] env[68638]: DEBUG nova.compute.manager [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Received event network-vif-plugged-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 780.212721] env[68638]: DEBUG oslo_concurrency.lockutils [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.213022] env[68638]: DEBUG oslo_concurrency.lockutils [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] Lock "90c192bd-b823-414c-b793-260eacc9904f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.213233] env[68638]: DEBUG oslo_concurrency.lockutils [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] Lock "90c192bd-b823-414c-b793-260eacc9904f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.213408] env[68638]: DEBUG nova.compute.manager [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] No waiting events found dispatching network-vif-plugged-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 780.213569] env[68638]: WARNING nova.compute.manager [req-a4ad13d7-727b-4091-ab46-424bc9e5d0ff req-b2d8294b-f163-41eb-9792-b53ce6c86a4b service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Received unexpected event 
network-vif-plugged-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 for instance with vm_state building and task_state spawning. [ 780.221295] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 780.221295] env[68638]: value = "task-2833553" [ 780.221295] env[68638]: _type = "Task" [ 780.221295] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.231512] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833553, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.286073] env[68638]: DEBUG nova.scheduler.client.report [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.340032] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833552, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.413058] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Updated VIF entry in instance network info cache for port ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.413733] env[68638]: DEBUG nova.network.neutron [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Updating instance_info_cache with network_info: [{"id": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "address": "fa:16:3e:37:05:1d", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab64b5b6-6a", "ovs_interfaceid": "ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.579837] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.580207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.593376] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.593530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.593682] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Building network info cache for instance {{(pid=68638) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.733637] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833553, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.792096] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.182s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.794760] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.282s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.796270] env[68638]: INFO nova.compute.claims [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.817808] env[68638]: INFO nova.scheduler.client.report [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Deleted allocations for instance ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac [ 780.844529] env[68638]: DEBUG oslo_vmware.api [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833552, 'name': PowerOnVM_Task, 'duration_secs': 0.680372} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.847460] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.847460] env[68638]: INFO nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Took 7.72 seconds to spawn the instance on the hypervisor. 
[ 780.847460] env[68638]: DEBUG nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.847643] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24dd413-1543-44e1-8abd-d3488354c4f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.919396] env[68638]: DEBUG oslo_concurrency.lockutils [req-fea2793f-6688-41d2-bed6-f0cb3bce7adb req-135068d1-391a-444d-a9d1-96e2c951a155 service nova] Releasing lock "refresh_cache-bb86aabd-129d-4c14-9db1-6676a5e7b9fa" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.006067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-dda84b25-2545-4c4c-a7a2-9ed304b5db43" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.006362] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-dda84b25-2545-4c4c-a7a2-9ed304b5db43" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.083894] env[68638]: INFO nova.compute.manager [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Detaching volume 2b7daa37-8901-45a9-b233-21a7aa53a70a [ 781.120366] env[68638]: INFO nova.virt.block_device [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Attempting to driver detach volume 2b7daa37-8901-45a9-b233-21a7aa53a70a from mountpoint /dev/sdb [ 781.120732] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 781.120812] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569882', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'name': 'volume-2b7daa37-8901-45a9-b233-21a7aa53a70a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c80895d5-1a59-4779-9da9-9aeec10bc395', 'attached_at': '', 'detached_at': '', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'serial': '2b7daa37-8901-45a9-b233-21a7aa53a70a'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 781.121844] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc50e56-586f-45f8-9707-51b3ece6920d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.145267] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e375070e-4507-4876-ab09-101a6511c389 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.149991] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.157475] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75817605-f7e9-433d-9f6e-48e714ea4f0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.183372] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb73ab5-656c-4ba8-80be-6ea8515faec6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.200646] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] The volume has not been displaced from its original location: [datastore2] volume-2b7daa37-8901-45a9-b233-21a7aa53a70a/volume-2b7daa37-8901-45a9-b233-21a7aa53a70a.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 781.206242] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfiguring VM instance instance-00000007 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 781.206598] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d62b0a2-17be-4d67-ad3d-583ec1761438 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.228381] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 781.228381] env[68638]: value = "task-2833554" [ 781.228381] env[68638]: _type = "Task" [ 781.228381] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.234500] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72082} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.235778] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] bb86aabd-129d-4c14-9db1-6676a5e7b9fa/bb86aabd-129d-4c14-9db1-6676a5e7b9fa.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 781.235778] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 781.235778] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4cef34f6-ce36-41b5-a048-3ff00e00d050 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.240938] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833554, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.251215] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 781.251215] env[68638]: value = "task-2833555" [ 781.251215] env[68638]: _type = "Task" [ 781.251215] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.268710] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833555, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.326876] env[68638]: DEBUG oslo_concurrency.lockutils [None req-64b80675-a657-4c00-ab1f-8bdbb9ed5b9c tempest-VolumesAssistedSnapshotsTest-1834165575 tempest-VolumesAssistedSnapshotsTest-1834165575-project-member] Lock "ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.768s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.368862] env[68638]: INFO nova.compute.manager [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Took 47.84 seconds to build instance. [ 781.387105] env[68638]: DEBUG nova.network.neutron [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.512132] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.512340] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.513482] 
env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2bc27c-c950-40db-b76a-01ed9850efa2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.532918] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f75459-f040-4b26-b35d-02a046671bef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.561731] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.584040] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfiguring VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 781.584040] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.585639] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-786ada1c-d627-4c91-8ffa-d8230021d22b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.620952] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 781.620952] env[68638]: value = "task-2833556" [ 781.620952] env[68638]: _type = "Task" [ 781.620952] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.633822] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.740913] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833554, 'name': ReconfigVM_Task, 'duration_secs': 0.442997} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.741222] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Reconfigured VM instance instance-00000007 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 781.745987] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-079f35ca-fbce-4faa-9abc-53eaaaf75bd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.773524] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081367} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.773524] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 781.773524] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 781.773524] env[68638]: value = "task-2833557" [ 781.773524] env[68638]: _type = "Task" [ 781.773524] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.773524] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3772eec4-dcb6-4bdf-b000-e1afba05f65a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.785587] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833557, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.805117] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] bb86aabd-129d-4c14-9db1-6676a5e7b9fa/bb86aabd-129d-4c14-9db1-6676a5e7b9fa.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 781.808633] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad964b36-22cc-4e2a-b823-62c07416b76e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.833305] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 781.833305] env[68638]: value = "task-2833558" [ 781.833305] env[68638]: _type = "Task" [ 781.833305] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.845022] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833558, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.870900] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ff98091-c80f-4a4d-82d2-f5725e4967ef tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.148s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.887956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.888326] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Instance network_info: |[{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 781.889021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:9b:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.896648] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Creating folder: Project (aa16293a678b4a35ac0837f6ce904e48). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 781.899608] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af50aff5-507f-4ae3-8648-a1023b66f65b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.914788] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Created folder: Project (aa16293a678b4a35ac0837f6ce904e48) in parent group-v569734. [ 781.915073] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Creating folder: Instances. Parent ref: group-v569887. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 781.915465] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1326cc45-744a-4b8b-a71e-a471ba54dd14 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.935730] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Created folder: Instances in parent group-v569887. [ 781.936104] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.936331] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.936588] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-937d0ec1-91d5-430f-ba5a-778fbb68c1f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.964027] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.964027] env[68638]: value = "task-2833561" [ 781.964027] env[68638]: _type = "Task" [ 781.964027] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.976531] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833561, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.128473] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.128678] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.129277] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.129525] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.129832] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.130071] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.130153] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 782.130527] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.141357] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.287047] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833557, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.345017] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833558, 'name': ReconfigVM_Task, 'duration_secs': 0.292246} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.345313] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Reconfigured VM instance instance-00000032 to attach disk [datastore1] bb86aabd-129d-4c14-9db1-6676a5e7b9fa/bb86aabd-129d-4c14-9db1-6676a5e7b9fa.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.345959] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c982143-aad2-4bea-b653-d0f75d0447f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.355540] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 782.355540] env[68638]: value = "task-2833562" [ 782.355540] env[68638]: _type = "Task" [ 782.355540] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.368105] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833562, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.375249] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.427163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "da306fdd-a5b4-4275-a482-f77cc008d780" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.427163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.427163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.427163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.427163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.431825] env[68638]: INFO nova.compute.manager [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Terminating instance [ 782.460472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f696ea1a-8659-4f1c-b777-ed73fc675c61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.471991] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c996c231-c1b1-4c9d-92e2-b00a93c0b9a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.478766] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833561, 'name': CreateVM_Task, 'duration_secs': 0.378273} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.479415] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 782.480157] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.480332] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.480653] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 782.481275] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8ad178b-335d-491b-b7c0-8e7009f9492a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.510942] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22327534-e943-4ca6-a763-bcf2c9fbdb7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.518287] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 782.518287] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c92b16-3ce2-259a-2a35-bb67a296bd3c" [ 782.518287] env[68638]: _type = "Task" [ 782.518287] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.528132] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1337c2b-3914-43b1-808b-06eadb2865af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.536032] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c92b16-3ce2-259a-2a35-bb67a296bd3c, 'name': SearchDatastore_Task, 'duration_secs': 0.010757} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.536032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.536032] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.536032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.536032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.536032] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.536032] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a375203f-b8c0-4e52-b3d8-a566130afd18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.544940] env[68638]: DEBUG nova.compute.provider_tree [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.556900] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.557083] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 782.558057] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2d7f2bc-9f5a-4836-af34-ff6c59d3b168 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.566772] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 782.566772] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523d41c0-679a-0519-7603-45e63db009a6" [ 782.566772] env[68638]: _type = "Task" [ 782.566772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.573341] env[68638]: DEBUG nova.compute.manager [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Received event network-changed-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 782.573530] env[68638]: DEBUG nova.compute.manager [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Refreshing instance network info cache due to event network-changed-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 782.573739] env[68638]: DEBUG oslo_concurrency.lockutils [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] Acquiring lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.573878] env[68638]: DEBUG oslo_concurrency.lockutils [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] Acquired lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.574053] env[68638]: DEBUG nova.network.neutron [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Refreshing network info cache for port d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.581102] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523d41c0-679a-0519-7603-45e63db009a6, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.581880] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d45d1418-c728-4c97-a39d-a97770b7aa36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.587903] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 782.587903] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52066a69-52af-b6fd-03f3-590b1a96520e" [ 782.587903] env[68638]: _type = "Task" [ 782.587903] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.597955] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52066a69-52af-b6fd-03f3-590b1a96520e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.634130] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.643279] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.787141] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833557, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.865996] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833562, 'name': Rename_Task, 'duration_secs': 0.158813} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.866499] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 782.866766] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc3015c8-8f53-4f89-b6be-7269cb3a9fea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.875936] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 782.875936] env[68638]: value = "task-2833563" [ 782.875936] env[68638]: _type = "Task" [ 782.875936] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.888205] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833563, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.900670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.938983] env[68638]: DEBUG nova.compute.manager [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 782.939497] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 782.941020] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10d95ad-bee8-4a54-bad8-177eeee109b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.954132] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 782.954537] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc026435-a6f5-4c59-9944-21de846d3cf2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.965732] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 782.965732] env[68638]: value = "task-2833564" [ 782.965732] env[68638]: _type = "Task" [ 782.965732] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.978549] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833564, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.049102] env[68638]: DEBUG nova.scheduler.client.report [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.103853] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52066a69-52af-b6fd-03f3-590b1a96520e, 'name': SearchDatastore_Task, 'duration_secs': 0.011455} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.103944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.104298] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 90c192bd-b823-414c-b793-260eacc9904f/90c192bd-b823-414c-b793-260eacc9904f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 783.104594] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b06d5ce-233c-42da-95e0-103db95d2ca8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.118867] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 783.118867] env[68638]: value = "task-2833565" [ 783.118867] env[68638]: _type = "Task" [ 783.118867] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.137993] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.145917] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.288612] env[68638]: DEBUG oslo_vmware.api [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833557, 'name': ReconfigVM_Task, 'duration_secs': 1.218934} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.288612] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569882', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'name': 'volume-2b7daa37-8901-45a9-b233-21a7aa53a70a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c80895d5-1a59-4779-9da9-9aeec10bc395', 'attached_at': '', 'detached_at': '', 'volume_id': '2b7daa37-8901-45a9-b233-21a7aa53a70a', 'serial': '2b7daa37-8901-45a9-b233-21a7aa53a70a'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 783.383129] env[68638]: DEBUG nova.network.neutron [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updated VIF entry in instance network info cache for port d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 783.383618] env[68638]: DEBUG nova.network.neutron [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.393304] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833563, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.478798] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833564, 'name': PowerOffVM_Task, 'duration_secs': 0.210896} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.479226] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 783.479473] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 783.479764] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cf34b5e-865c-4b4a-af46-60d86bca3f44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.557731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.558532] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 783.565357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.390s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.565357] env[68638]: DEBUG nova.objects.instance [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 783.566883] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 783.567137] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 783.567477] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-42304ab9-7812-4d8a-99a3-66686b393093 
tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Deleting the datastore file [datastore1] da306fdd-a5b4-4275-a482-f77cc008d780 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 783.568095] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e89c4107-a61b-4396-b722-8aa85d0efdb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.582013] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for the task: (returnval){ [ 783.582013] env[68638]: value = "task-2833567" [ 783.582013] env[68638]: _type = "Task" [ 783.582013] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.593979] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.634372] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833565, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.640594] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.860897] env[68638]: DEBUG nova.objects.instance [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lazy-loading 'flavor' on Instance uuid c80895d5-1a59-4779-9da9-9aeec10bc395 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.888008] env[68638]: DEBUG oslo_concurrency.lockutils [req-55b3872e-8138-4f69-82b4-a97e58cfa37c req-ac57042a-60ba-47de-a4ec-74b2e83e1622 service nova] Releasing lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.888425] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833563, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.070204] env[68638]: DEBUG nova.compute.utils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 784.071636] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.071901] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.094360] env[68638]: DEBUG oslo_vmware.api [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Task: {'id': task-2833567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172752} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.094699] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.094961] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.095099] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.095257] env[68638]: INFO nova.compute.manager [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Took 1.16 seconds to destroy the instance on the hypervisor. [ 784.095531] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.095730] env[68638]: DEBUG nova.compute.manager [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.095843] env[68638]: DEBUG nova.network.neutron [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.109685] env[68638]: DEBUG nova.policy [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 784.132717] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534713} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.133513] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 90c192bd-b823-414c-b793-260eacc9904f/90c192bd-b823-414c-b793-260eacc9904f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.133743] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.134041] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b277cf62-5b65-4424-9378-a89e7dff7d18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.139403] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.148429] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 784.148429] env[68638]: value = "task-2833568" [ 784.148429] env[68638]: _type = "Task" [ 784.148429] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.164279] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833568, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.390214] env[68638]: DEBUG oslo_vmware.api [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833563, 'name': PowerOnVM_Task, 'duration_secs': 1.307526} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.394760] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.394823] env[68638]: INFO nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Took 7.42 seconds to spawn the instance on the hypervisor. [ 784.395044] env[68638]: DEBUG nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 784.396105] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa092b78-f927-4460-a4b6-22efcdefc328 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.472812] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Successfully created port: 0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.575478] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 784.582850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-85148764-e07c-4d77-bd11-4400262c9b6b tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.588413] env[68638]: DEBUG nova.compute.manager [req-839c3646-8ce7-434e-9f25-7d21429643ce req-2c9af842-f90d-4a9e-a453-f78739bc2671 service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Received event network-vif-deleted-c813e7f7-aecf-49ab-a677-4b2109dce440 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 784.589103] env[68638]: INFO nova.compute.manager [req-839c3646-8ce7-434e-9f25-7d21429643ce req-2c9af842-f90d-4a9e-a453-f78739bc2671 service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Neutron deleted interface c813e7f7-aecf-49ab-a677-4b2109dce440; detaching it from the instance and deleting it from the info cache [ 784.589348] env[68638]: DEBUG nova.network.neutron [req-839c3646-8ce7-434e-9f25-7d21429643ce req-2c9af842-f90d-4a9e-a453-f78739bc2671 service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.591791] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.411s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.592032] env[68638]: DEBUG nova.objects.instance [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lazy-loading 'resources' on Instance uuid 072be237-c51e-43d2-ad84-46122ef9f335 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.639484] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.661217] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078316} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.661217] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.661217] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c432770b-5f96-4b60-a465-f4af5bd33e6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.687216] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 90c192bd-b823-414c-b793-260eacc9904f/90c192bd-b823-414c-b793-260eacc9904f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.687216] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35e51f76-67e1-4db8-a31f-db85dce2385c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.710538] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 784.710538] env[68638]: value = "task-2833569" [ 784.710538] env[68638]: _type = "Task" [ 784.710538] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.725074] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833569, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.868417] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1676e2cb-ff60-419e-bf1f-b4afa64c169d tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.288s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.928762] env[68638]: INFO nova.compute.manager [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Took 48.12 seconds to build instance. 
[ 785.038140] env[68638]: DEBUG nova.network.neutron [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.092962] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8a6089f-c13c-4ea8-895d-7ec73471718d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.109652] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab4038e-98e5-4e8e-ad8c-38179debbeb7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.140350] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.156264] env[68638]: DEBUG nova.compute.manager [req-839c3646-8ce7-434e-9f25-7d21429643ce req-2c9af842-f90d-4a9e-a453-f78739bc2671 service nova] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Detach interface failed, port_id=c813e7f7-aecf-49ab-a677-4b2109dce440, reason: Instance da306fdd-a5b4-4275-a482-f77cc008d780 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 785.227049] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833569, 'name': ReconfigVM_Task, 'duration_secs': 0.335515} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.227049] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 90c192bd-b823-414c-b793-260eacc9904f/90c192bd-b823-414c-b793-260eacc9904f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 785.227661] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a206c738-3ae2-4dec-b74e-41c5feda719f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.241433] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 785.241433] env[68638]: value = "task-2833570" [ 785.241433] env[68638]: _type = "Task" [ 785.241433] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.252515] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833570, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.430129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b09f06f-5a36-4d1c-bbbd-29a8d6d821fb tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.298s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.541183] env[68638]: INFO nova.compute.manager [-] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Took 1.45 seconds to deallocate network for instance. [ 785.588695] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 785.617802] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 785.618126] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.618348] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.618643] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.618888] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.619103] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 785.619450] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 785.619635] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 785.619813] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 785.619979] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 785.620427] env[68638]: DEBUG nova.virt.hardware [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 785.621395] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614d0cce-b205-4fcb-9564-eb564a23565d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.636387] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153ffbd8-886b-4b68-8a13-f9cab91c73e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.644871] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.761051] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833570, 'name': Rename_Task, 'duration_secs': 0.192713} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.765107] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.765695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ce1a1fe-42cf-4d50-b628-95e19fd7d86e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.775956] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 785.775956] env[68638]: value = "task-2833571" [ 785.775956] env[68638]: _type = "Task" [ 785.775956] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.790274] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.792506] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bdb888-83cc-431f-95f4-55753d17c07b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.800485] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f675c37-c0f2-4b97-9463-230aca92814d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.835909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2968a986-09b1-4fb1-911c-9bc5d6e08602 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.844972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08be3e5-3a7a-4e49-b810-38ec17f54109 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.860989] env[68638]: DEBUG nova.compute.provider_tree [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.936579] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 786.053332] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.150273] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.297832] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833571, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.305786] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.305786] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.308769] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Successfully updated port: 0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.364270] env[68638]: DEBUG nova.scheduler.client.report [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.457494] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.641315] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.748212] env[68638]: DEBUG nova.compute.manager [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Received event network-vif-plugged-0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 786.748457] env[68638]: DEBUG oslo_concurrency.lockutils [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] Acquiring lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.748719] env[68638]: DEBUG oslo_concurrency.lockutils [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.748961] env[68638]: DEBUG oslo_concurrency.lockutils [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.749190] env[68638]: DEBUG nova.compute.manager [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] No waiting events found dispatching network-vif-plugged-0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.749394] env[68638]: WARNING nova.compute.manager [req-9544e9d0-5837-4fde-be86-31fa34d2ca83 req-bdae4d45-facf-4c2a-bb68-e4fc72121c05 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Received unexpected event network-vif-plugged-0f78cf8f-95cb-4e44-8401-00f04386577b for instance with vm_state building and task_state spawning. [ 786.793531] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833571, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.811383] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.811536] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.811684] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.869732] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.278s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.872585] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.451s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.873908] env[68638]: DEBUG nova.objects.instance [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lazy-loading 'resources' on Instance uuid b9736ec5-6332-4202-95d6-a3cd1d1f11d7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 786.890908] env[68638]: INFO nova.scheduler.client.report [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleted allocations for instance 072be237-c51e-43d2-ad84-46122ef9f335 [ 786.955020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.955020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.143302] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.298363] env[68638]: DEBUG oslo_vmware.api [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833571, 'name': PowerOnVM_Task, 'duration_secs': 1.309962} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.298667] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 787.298883] env[68638]: INFO nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Took 7.68 seconds to spawn the instance on the hypervisor. [ 787.299068] env[68638]: DEBUG nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.299959] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e310d216-f629-4c26-928d-ea187f5d63e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.403380] env[68638]: DEBUG oslo_concurrency.lockutils [None req-de3e8463-29d2-437c-bab3-9bad84fa2470 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "072be237-c51e-43d2-ad84-46122ef9f335" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.288s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.447405] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.644809] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.651678] env[68638]: DEBUG nova.network.neutron [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Updating instance_info_cache with network_info: [{"id": "0f78cf8f-95cb-4e44-8401-00f04386577b", "address": "fa:16:3e:93:ea:87", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f78cf8f-95", "ovs_interfaceid": "0f78cf8f-95cb-4e44-8401-00f04386577b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.821809] env[68638]: INFO nova.compute.manager [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Took 46.20 seconds to build instance. 
[ 788.056684] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbc7c97-0f70-404b-99cf-5c6ea5e8f9b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.065218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a73ef4-393e-4575-a6c0-4256d3c5a8bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.099424] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b819579-fbca-4e66-8486-6b771ae5433b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.108068] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0d6d26-d4e3-4b94-a729-b7bc55b08fea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.129048] env[68638]: DEBUG nova.compute.provider_tree [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.145059] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.154025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.154368] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Instance network_info: |[{"id": "0f78cf8f-95cb-4e44-8401-00f04386577b", "address": "fa:16:3e:93:ea:87", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f78cf8f-95", "ovs_interfaceid": "0f78cf8f-95cb-4e44-8401-00f04386577b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.154789] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:ea:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f78cf8f-95cb-4e44-8401-00f04386577b', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.162385] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.162898] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.163175] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da037406-89f6-4865-9cc6-dc37f1b6addd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.183981] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.183981] env[68638]: value = "task-2833572" [ 788.183981] env[68638]: _type = "Task" [ 788.183981] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.192045] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833572, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.323870] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6ea473cc-c117-4d13-9291-3129eef360ad tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.817s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.332677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "be761cf1-0949-42c0-8a38-58af33113a03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.333917] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.334237] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "be761cf1-0949-42c0-8a38-58af33113a03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.334564] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.334699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.337031] env[68638]: INFO nova.compute.manager [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Terminating instance [ 788.634265] env[68638]: DEBUG nova.scheduler.client.report [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 788.646654] env[68638]: DEBUG oslo_vmware.api [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833556, 'name': ReconfigVM_Task, 'duration_secs': 6.983298} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.646882] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.647093] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Reconfigured VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 788.697269] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833572, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.826628] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.842315] env[68638]: DEBUG nova.compute.manager [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 788.842924] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 788.845642] env[68638]: DEBUG nova.compute.manager [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Received event network-changed-0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 788.845642] env[68638]: DEBUG nova.compute.manager [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Refreshing instance network info cache due to event network-changed-0f78cf8f-95cb-4e44-8401-00f04386577b. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 788.845642] env[68638]: DEBUG oslo_concurrency.lockutils [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] Acquiring lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.845642] env[68638]: DEBUG oslo_concurrency.lockutils [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] Acquired lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.845642] env[68638]: DEBUG nova.network.neutron [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Refreshing network info cache for port 0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.847673] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb259cf-44dc-41dd-8ed8-a71dacb65995 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.857109] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 788.858631] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de4f2eaf-9519-4ee0-a024-c6bcd0f497ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.871299] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 788.871299] env[68638]: value = "task-2833573" [ 788.871299] env[68638]: _type = "Task" [ 788.871299] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.884122] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833573, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.144663] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.270s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.145581] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.726s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.150216] env[68638]: INFO nova.compute.claims [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.176557] env[68638]: INFO nova.scheduler.client.report [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleted allocations for instance b9736ec5-6332-4202-95d6-a3cd1d1f11d7 [ 789.198827] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833572, 'name': CreateVM_Task, 'duration_secs': 0.646471} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.198827] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.199447] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.199758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.200320] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 789.200718] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e648199-cb8e-490d-88f7-b26c06d97bfb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.210285] env[68638]: DEBUG oslo_vmware.api [None 
req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 789.210285] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524423d8-f64a-71fb-66ce-dce3886c27b4" [ 789.210285] env[68638]: _type = "Task" [ 789.210285] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.220050] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524423d8-f64a-71fb-66ce-dce3886c27b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.349987] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.379562] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833573, 'name': PowerOffVM_Task, 'duration_secs': 0.279443} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.379853] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.380040] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.380309] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-336179fe-9d10-4c94-b372-ad9031e60264 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.458790] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 789.459042] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 789.459242] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleting the datastore file [datastore2] be761cf1-0949-42c0-8a38-58af33113a03 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 789.459689] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a173787f-4443-4f1c-9053-0926ce1a466f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.468209] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for the task: (returnval){ [ 789.468209] env[68638]: value = "task-2833575" [ 789.468209] env[68638]: _type = "Task" [ 789.468209] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.480136] env[68638]: DEBUG nova.compute.manager [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Received event network-changed-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 789.480382] env[68638]: DEBUG nova.compute.manager [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Refreshing instance network info cache due to event network-changed-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 789.480629] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] Acquiring lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.480804] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] Acquired lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.481008] env[68638]: DEBUG nova.network.neutron [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Refreshing network info cache for port d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.491327] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.655677] env[68638]: DEBUG nova.network.neutron [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Updated VIF entry in instance network info cache for port 0f78cf8f-95cb-4e44-8401-00f04386577b. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.656113] env[68638]: DEBUG nova.network.neutron [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Updating instance_info_cache with network_info: [{"id": "0f78cf8f-95cb-4e44-8401-00f04386577b", "address": "fa:16:3e:93:ea:87", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f78cf8f-95", "ovs_interfaceid": "0f78cf8f-95cb-4e44-8401-00f04386577b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.687746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-abe06953-c687-40b3-95dc-f373366cadd1 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "b9736ec5-6332-4202-95d6-a3cd1d1f11d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.615s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.718780] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524423d8-f64a-71fb-66ce-dce3886c27b4, 'name': SearchDatastore_Task, 'duration_secs': 0.022267} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.719122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.720025] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.720025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.720178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.720323] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.720893] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0354500-62c4-4eba-bcf1-9c782056564a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.731873] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.732017] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.732874] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be80330e-ef78-4b22-9915-7c0daba14f8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.739432] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 789.739432] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52333b6a-ec78-5782-d2b5-34cd15b9a7b9" [ 789.739432] env[68638]: _type = "Task" [ 789.739432] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.748663] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52333b6a-ec78-5782-d2b5-34cd15b9a7b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.944490] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.944810] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.948198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.948198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.948198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.948527] env[68638]: INFO nova.compute.manager [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Terminating instance [ 789.980195] env[68638]: DEBUG oslo_vmware.api [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Task: {'id': task-2833575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24172} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.980456] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 789.980635] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 789.980802] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.981399] env[68638]: INFO nova.compute.manager [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Took 1.14 seconds to destroy the instance on the hypervisor. [ 789.981399] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.981531] env[68638]: DEBUG nova.compute.manager [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 789.981570] env[68638]: DEBUG nova.network.neutron [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.019761] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.019964] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.020160] env[68638]: DEBUG nova.network.neutron [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.160324] env[68638]: DEBUG oslo_concurrency.lockutils [req-28289d93-f250-4e2e-922e-10a6cbdc3da0 req-d9b7b8af-95e8-463a-a062-22034bd6b360 service nova] Releasing lock "refresh_cache-772af0c0-a8dd-4167-87bc-617a9d95b54d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.251938] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52333b6a-ec78-5782-d2b5-34cd15b9a7b9, 'name': SearchDatastore_Task, 'duration_secs': 0.010645} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.255547] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6fecaa7-1486-4535-a112-2c3899ca1e4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.263913] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 790.263913] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524b8e39-7c41-f608-eb70-9e174a02f90d" [ 790.263913] env[68638]: _type = "Task" [ 790.263913] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.274035] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524b8e39-7c41-f608-eb70-9e174a02f90d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.322103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.322103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.331569] env[68638]: DEBUG nova.network.neutron [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updated VIF entry in instance network info cache for port d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.332054] env[68638]: DEBUG nova.network.neutron [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.451823] env[68638]: DEBUG nova.compute.manager [None req-4712db6a-eedf-42e0-9df8-cff990da046a 
tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 790.452203] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 790.453656] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d077eec-02aa-4c60-b76f-448e6139cd07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.462143] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.464763] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df31d3b2-41e6-42d6-af1c-5b44f0966f30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.475336] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 790.475336] env[68638]: value = "task-2833576" [ 790.475336] env[68638]: _type = "Task" [ 790.475336] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.486028] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833576, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.707249] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.707688] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.707835] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.708097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.708338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.713449] env[68638]: INFO nova.compute.manager [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Terminating instance [ 790.730147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5043e43e-59a2-49ff-9a63-93b6d7adbf56 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.739601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bac21e-fd24-4a1c-be97-3cc701bf6b7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.743607] env[68638]: DEBUG nova.network.neutron [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.775312] env[68638]: INFO nova.network.neutron [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea 
tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Port dda84b25-2545-4c4c-a7a2-9ed304b5db43 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 790.775676] env[68638]: DEBUG nova.network.neutron [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [{"id": "316407a1-ab13-4bd4-98ef-7e090d54399c", "address": "fa:16:3e:ca:38:c2", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316407a1-ab", "ovs_interfaceid": "316407a1-ab13-4bd4-98ef-7e090d54399c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.782159] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd5e727-d066-4c69-a4da-6f558c8e09a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.795538] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524b8e39-7c41-f608-eb70-9e174a02f90d, 'name': SearchDatastore_Task, 'duration_secs': 0.027739} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.797086] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2455d068-e536-48bd-b868-94d5886f0afb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.801798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.802151] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 772af0c0-a8dd-4167-87bc-617a9d95b54d/772af0c0-a8dd-4167-87bc-617a9d95b54d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.802700] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e039371e-b4f7-4997-bfe1-01ffca9c738e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.819033] env[68638]: DEBUG nova.compute.provider_tree [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.821486] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 790.821486] env[68638]: value = "task-2833577" [ 790.821486] env[68638]: _type = "Task" [ 790.821486] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.831138] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833577, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.839087] env[68638]: DEBUG oslo_concurrency.lockutils [req-3fffdd1d-a028-4733-a8f5-316ca8d1d92d req-1a95f283-981b-4bb1-8263-d8f81857531f service nova] Releasing lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.904133] env[68638]: DEBUG nova.compute.manager [req-58044d0a-510b-48e4-807b-0855905745a7 req-eee961b6-bbdb-43fc-9f2b-46e32dded89a service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-vif-deleted-dda84b25-2545-4c4c-a7a2-9ed304b5db43 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 790.904375] env[68638]: DEBUG nova.compute.manager [req-58044d0a-510b-48e4-807b-0855905745a7 req-eee961b6-bbdb-43fc-9f2b-46e32dded89a service nova] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Received event network-vif-deleted-9868aa77-d4cb-4432-9b96-1caa6f97fb36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 790.986225] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833576, 'name': PowerOffVM_Task, 'duration_secs': 0.385914} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.986529] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.986712] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 790.986967] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e5eb6fa-ef2f-4659-ad1c-69742c869833 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.071323] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.071323] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.071488] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleting the datastore file [datastore1] 
14772ba8-bde2-42ef-9a37-df876c8af321 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.071808] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98560cea-ff14-4050-943c-837ab00bec99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.080594] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 791.080594] env[68638]: value = "task-2833579" [ 791.080594] env[68638]: _type = "Task" [ 791.080594] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.089428] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.223508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock "refresh_cache-20f2c343-1f32-4c36-b4a9-8f009b6ac326" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.223508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquired lock "refresh_cache-20f2c343-1f32-4c36-b4a9-8f009b6ac326" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.223508] env[68638]: DEBUG nova.network.neutron [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.246399] env[68638]: INFO nova.compute.manager [-] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Took 1.26 seconds to deallocate network for instance. 
[ 791.286625] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-14772ba8-bde2-42ef-9a37-df876c8af321" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.324200] env[68638]: DEBUG nova.scheduler.client.report [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.339632] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833577, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.591956] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.743821] env[68638]: DEBUG nova.network.neutron [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.754322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.792201] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c5ea3f6-7438-48ad-902d-8720147f6aea tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-14772ba8-bde2-42ef-9a37-df876c8af321-dda84b25-2545-4c4c-a7a2-9ed304b5db43" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.785s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.801497] env[68638]: DEBUG nova.network.neutron [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.833478] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.835201] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 791.837277] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.603s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.838767] env[68638]: INFO nova.compute.claims [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.848310] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731785} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.848734] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 772af0c0-a8dd-4167-87bc-617a9d95b54d/772af0c0-a8dd-4167-87bc-617a9d95b54d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.849437] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.849437] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d7255ce-2565-46d4-a6a7-1929c25b07f2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.861254] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 791.861254] env[68638]: value = "task-2833580" [ 791.861254] env[68638]: _type = "Task" [ 791.861254] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.874272] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.093725] env[68638]: DEBUG oslo_vmware.api [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.681557} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.094064] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.094222] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.094374] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.094616] env[68638]: INFO nova.compute.manager [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Took 1.64 seconds to destroy the instance on the hypervisor. [ 792.094827] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.094986] env[68638]: DEBUG nova.compute.manager [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.095089] env[68638]: DEBUG nova.network.neutron [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.304777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Releasing lock "refresh_cache-20f2c343-1f32-4c36-b4a9-8f009b6ac326" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.305437] env[68638]: DEBUG nova.compute.manager [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 792.305640] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 792.306936] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855a95d1-60d1-4aca-811a-7ca2d0b41db0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.315401] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 792.315685] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7200f79c-8206-4270-a8be-4fb01272f982 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.323509] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 792.323509] env[68638]: value = "task-2833581" [ 792.323509] env[68638]: _type = "Task" [ 792.323509] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.332688] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833581, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.349464] env[68638]: DEBUG nova.compute.utils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 792.350932] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 792.351134] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 792.375244] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07606} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.375984] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.376870] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c9dec5-11a4-4c3f-9310-19eca6b4967e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.400861] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 772af0c0-a8dd-4167-87bc-617a9d95b54d/772af0c0-a8dd-4167-87bc-617a9d95b54d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.406021] env[68638]: DEBUG nova.policy [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28c09a80775a4919b09d3baae8689650', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd95966c092754deca9ed66c97041235b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 792.406021] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-602865df-41ec-4b24-ab4b-3dd0b39f94cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.428894] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 792.428894] env[68638]: value = "task-2833582" [ 792.428894] env[68638]: _type = "Task" [ 792.428894] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.440514] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833582, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.685603] env[68638]: DEBUG nova.compute.manager [req-82088293-a907-4c82-8c1e-6ba7a2a876f4 req-5d33fc78-5228-43c6-91c0-6b3628e72cdd service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Received event network-vif-deleted-316407a1-ab13-4bd4-98ef-7e090d54399c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 792.685813] env[68638]: INFO nova.compute.manager [req-82088293-a907-4c82-8c1e-6ba7a2a876f4 req-5d33fc78-5228-43c6-91c0-6b3628e72cdd service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Neutron deleted interface 316407a1-ab13-4bd4-98ef-7e090d54399c; detaching it from the instance and deleting it from the info cache [ 792.685988] env[68638]: DEBUG nova.network.neutron [req-82088293-a907-4c82-8c1e-6ba7a2a876f4 req-5d33fc78-5228-43c6-91c0-6b3628e72cdd service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.800069] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Successfully created port: 7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.836115] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833581, 'name': PowerOffVM_Task, 'duration_secs': 0.372121} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.836527] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.836818] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 792.837568] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03b92de2-4d6b-4a15-a3db-aa4c55e32d9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.856265] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 792.887017] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 792.887680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 792.887955] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleting the datastore file [datastore1] 20f2c343-1f32-4c36-b4a9-8f009b6ac326 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.888276] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3598167-25d1-4977-9848-30c84ca25120 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.900280] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for the task: (returnval){ [ 792.900280] env[68638]: value = "task-2833584" [ 792.900280] env[68638]: _type = "Task" [ 792.900280] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.911743] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.943047] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833582, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.091124] env[68638]: DEBUG nova.network.neutron [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.190021] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad13b000-5de9-4620-b67f-13067cfc58eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.200552] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53651348-daf6-4cea-8fdd-4961c44049ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.245103] env[68638]: DEBUG nova.compute.manager [req-82088293-a907-4c82-8c1e-6ba7a2a876f4 req-5d33fc78-5228-43c6-91c0-6b3628e72cdd service nova] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Detach interface failed, port_id=316407a1-ab13-4bd4-98ef-7e090d54399c, reason: Instance 14772ba8-bde2-42ef-9a37-df876c8af321 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 793.412744] env[68638]: DEBUG oslo_vmware.api [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Task: {'id': task-2833584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.413017] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 793.413234] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 793.413427] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 793.413599] env[68638]: INFO nova.compute.manager [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Took 1.11 seconds to destroy the instance on the hypervisor. [ 793.413842] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 793.414039] env[68638]: DEBUG nova.compute.manager [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 793.414135] env[68638]: DEBUG nova.network.neutron [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 793.429649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23b18e7-081e-4989-ac7b-057dcc6430c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.432659] env[68638]: DEBUG nova.network.neutron [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.445702] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e984b04a-67d1-41f4-a8c9-5590cbe2e2e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.449167] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833582, 'name': ReconfigVM_Task, 'duration_secs': 0.575399} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.449730] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 772af0c0-a8dd-4167-87bc-617a9d95b54d/772af0c0-a8dd-4167-87bc-617a9d95b54d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.450705] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8d3e922-ec63-4828-81a5-c4ca731ef7b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.479594] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4538b7-ce63-4232-b0d7-45ac9ea4230e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.482399] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 793.482399] env[68638]: value = "task-2833585" [ 793.482399] env[68638]: _type = "Task" [ 793.482399] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.491560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe79f8c-3425-435f-b76c-6c8fbf9926ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.498585] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833585, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.508769] env[68638]: DEBUG nova.compute.provider_tree [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.594083] env[68638]: INFO nova.compute.manager [-] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Took 1.50 seconds to deallocate network for instance. [ 793.864258] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 793.888520] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 793.888809] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.889014] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 793.890334] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Flavor pref 0:0:0 
{{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.890536] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 793.890698] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 793.890912] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 793.891092] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 793.891266] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 793.891431] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 793.891601] env[68638]: DEBUG nova.virt.hardware [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 793.892827] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e2fb2c-ca05-4b0c-a3f6-a8a9e68c616d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.901239] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61732d52-2819-4c2a-9dbc-92c2a76cb10e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.937866] env[68638]: DEBUG nova.network.neutron [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.993656] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833585, 'name': Rename_Task, 'duration_secs': 
0.144257} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.994008] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.994303] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77b68703-32d2-4fa1-941d-44fb6b4c5d19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.002175] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 794.002175] env[68638]: value = "task-2833586" [ 794.002175] env[68638]: _type = "Task" [ 794.002175] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.011099] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.011251] env[68638]: DEBUG nova.scheduler.client.report [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.100700] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.339354] env[68638]: DEBUG nova.compute.manager [req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received event network-vif-plugged-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 794.339354] env[68638]: DEBUG oslo_concurrency.lockutils [req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] Acquiring lock "809416da-af6c-429d-b4b2-5334768aa744-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.339354] env[68638]: DEBUG oslo_concurrency.lockutils 
[req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] Lock "809416da-af6c-429d-b4b2-5334768aa744-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.339354] env[68638]: DEBUG oslo_concurrency.lockutils [req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] Lock "809416da-af6c-429d-b4b2-5334768aa744-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.339354] env[68638]: DEBUG nova.compute.manager [req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] No waiting events found dispatching network-vif-plugged-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.339354] env[68638]: WARNING nova.compute.manager [req-3d1ceeca-f399-4393-8d40-1099035127f9 req-0f4b115d-c07e-4c4c-9739-c247deaacaca service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received unexpected event network-vif-plugged-7a860c34-618e-494a-9a17-d5a14acf9fb5 for instance with vm_state building and task_state spawning. [ 794.422925] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Successfully updated port: 7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.440832] env[68638]: INFO nova.compute.manager [-] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Took 1.03 seconds to deallocate network for instance. [ 794.512884] env[68638]: DEBUG oslo_vmware.api [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833586, 'name': PowerOnVM_Task, 'duration_secs': 0.504196} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.513208] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.513555] env[68638]: INFO nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 794.513645] env[68638]: DEBUG nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.514462] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d29ba5-7963-413e-92c4-1acf74169e55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.517582] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.518062] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 794.520662] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.791s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.520867] env[68638]: DEBUG nova.objects.instance [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lazy-loading 'resources' on Instance uuid 06a1a44f-35ee-45d2-9503-23468150b72f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.926307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.926481] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.926665] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.948268] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.026345] env[68638]: DEBUG nova.compute.utils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.031610] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 795.031870] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 795.040671] env[68638]: INFO nova.compute.manager [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Took 48.55 seconds to build instance. [ 795.098923] env[68638]: DEBUG nova.policy [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a616000af9d045d7884a7e7cde99ab86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c7eca7abe024770ac2ffa98a2c8d58b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 795.467133] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.524329] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Successfully created port: da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.532601] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 795.544034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a93e8aa-80cf-407d-9dda-175a89f2d21f tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.578s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.561384] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1466179b-8fd2-4902-a5b3-03505ced597e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.574446] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9461fb8-625b-4513-af88-8dcbd2b96f70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.609807] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17b2a9f-f209-47b4-bd32-0340453d35f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.619069] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f7bbc3-94c8-4e86-80e7-915ad09733cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.638197] env[68638]: DEBUG nova.compute.provider_tree [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.661309] env[68638]: DEBUG nova.network.neutron [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [{"id": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "address": "fa:16:3e:95:a1:3a", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a860c34-61", "ovs_interfaceid": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 796.047543] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.144226] env[68638]: DEBUG nova.scheduler.client.report [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 796.147674] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.147906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.148119] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.148302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.148470] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.151023] env[68638]: INFO nova.compute.manager [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Terminating instance [ 796.163442] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.163737] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Instance network_info: |[{"id": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "address": "fa:16:3e:95:a1:3a", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a860c34-61", "ovs_interfaceid": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 796.164380] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:a1:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a860c34-618e-494a-9a17-d5a14acf9fb5', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.171796] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Creating folder: Project (d95966c092754deca9ed66c97041235b). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.172278] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2e2228c-cb4c-4a7b-97d9-bf0be415957b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.185094] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Created folder: Project (d95966c092754deca9ed66c97041235b) in parent group-v569734. [ 796.185238] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Creating folder: Instances. Parent ref: group-v569891. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.185474] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-446874c7-4f57-4bcb-a9d5-2c3e6ba7747f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.196918] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Created folder: Instances in parent group-v569891. [ 796.196918] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.196918] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.197109] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc108a44-5de4-48a8-bd7a-f8a8cbfc40ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.216125] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.216125] env[68638]: value = "task-2833589" [ 796.216125] env[68638]: _type = "Task" [ 796.216125] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.224288] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833589, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.367085] env[68638]: DEBUG nova.compute.manager [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 796.367331] env[68638]: DEBUG nova.compute.manager [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing instance network info cache due to event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 796.367596] env[68638]: DEBUG oslo_concurrency.lockutils [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] Acquiring lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.367757] env[68638]: DEBUG oslo_concurrency.lockutils [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] Acquired lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.367919] env[68638]: DEBUG nova.network.neutron [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 796.542915] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 796.570070] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 796.570317] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.570476] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 796.570658] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.570806] env[68638]: DEBUG nova.virt.hardware 
[None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 796.570952] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 796.571197] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 796.571382] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 796.571555] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 796.571721] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 796.571895] env[68638]: DEBUG nova.virt.hardware [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 796.573033] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b26446b-915e-4c4b-b76e-3e27b6ed1760 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.576450] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.583838] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7c879c-6b15-4b6e-a041-cfb4af07dea3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.651771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 
tempest-ServersListShow296Test-1262795635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.654445] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.563s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.654697] env[68638]: DEBUG nova.objects.instance [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lazy-loading 'resources' on Instance uuid 8992f062-c28f-4ac8-8d0d-0c51c3784e88 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 796.656376] env[68638]: DEBUG nova.compute.manager [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.656592] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.657676] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdb124f-26ca-4840-abab-77f23c1df09b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.669174] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.669423] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c40e3ca5-c574-4310-ae4c-2961a7a43726 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.671805] env[68638]: INFO nova.scheduler.client.report [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Deleted allocations for instance 06a1a44f-35ee-45d2-9503-23468150b72f [ 796.678971] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 796.678971] env[68638]: value = "task-2833590" [ 796.678971] env[68638]: _type = "Task" [ 796.678971] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.687655] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833590, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.728027] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833589, 'name': CreateVM_Task, 'duration_secs': 0.389204} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.728027] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 796.728714] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.728892] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.729250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 796.729585] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee183d58-300b-4b0c-ab83-8e0ea3ad56e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.735082] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 796.735082] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52be998b-f4d5-bea7-e2fd-c0c0fa22ba27" [ 796.735082] env[68638]: _type = "Task" [ 796.735082] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.744416] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52be998b-f4d5-bea7-e2fd-c0c0fa22ba27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.178839] env[68638]: DEBUG oslo_concurrency.lockutils [None req-70074c28-09ae-4939-baf8-f89b913bf662 tempest-ServersListShow296Test-1262795635 tempest-ServersListShow296Test-1262795635-project-member] Lock "06a1a44f-35ee-45d2-9503-23468150b72f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.700s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.191063] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833590, 'name': PowerOffVM_Task, 'duration_secs': 0.204065} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.191348] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.191839] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.191839] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80a76338-b436-4de8-905e-ba513c33b95b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.210272] env[68638]: DEBUG nova.network.neutron [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updated VIF entry in instance network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 797.210272] env[68638]: DEBUG nova.network.neutron [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [{"id": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "address": "fa:16:3e:95:a1:3a", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a860c34-61", "ovs_interfaceid": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.218598] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Successfully updated port: da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.251572] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52be998b-f4d5-bea7-e2fd-c0c0fa22ba27, 'name': SearchDatastore_Task, 'duration_secs': 0.0136} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.252142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.252232] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.252507] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.252609] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.252878] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.253203] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7af418d4-16f8-4d92-8fc3-4af497f46ec1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.263024] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.263514] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.263739] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore1] 772af0c0-a8dd-4167-87bc-617a9d95b54d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.264013] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1b77fee-3e0b-4a5f-b53f-9283f2db43aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.271379] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.271578] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 797.275210] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cdc26c6-c95c-41e6-b7af-be4567373f82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.280204] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 797.280204] env[68638]: value = "task-2833592" [ 797.280204] env[68638]: _type = "Task" [ 797.280204] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.284316] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 797.284316] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52de9953-acda-3f7d-df1d-8a89284ba965" [ 797.284316] env[68638]: _type = "Task" [ 797.284316] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.290493] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.298900] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52de9953-acda-3f7d-df1d-8a89284ba965, 'name': SearchDatastore_Task, 'duration_secs': 0.010198} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.299910] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aa39b6b-4e3a-4b1a-8114-217f73580bed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.307513] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 797.307513] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52329d6e-6cc0-d8ff-fa68-714efe14ef3e" [ 797.307513] env[68638]: _type = "Task" [ 797.307513] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.316265] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52329d6e-6cc0-d8ff-fa68-714efe14ef3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.689968] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22f30dc-f66e-4374-86b8-bf0efdcb725e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.698235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1951ae-7a36-4c3f-8c77-16f3ba63742c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.731873] env[68638]: DEBUG oslo_concurrency.lockutils [req-56f317e0-ab85-49b9-8f20-2ff087bf4d8f req-794ca0c6-633d-48a5-a83b-8c57dc803d25 service nova] Releasing lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.732415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.732554] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.732685] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.735826] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30fad84-b5a7-46de-b98c-97e27e7a2e11 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.744704] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84622b0-fa27-4f26-95e0-e44e37bf3f9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.760709] env[68638]: DEBUG nova.compute.provider_tree [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.789793] env[68638]: DEBUG oslo_vmware.api [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134328} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.790097] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.790292] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.790474] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.790640] env[68638]: INFO nova.compute.manager [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 797.790875] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.791089] env[68638]: DEBUG nova.compute.manager [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.791182] env[68638]: DEBUG nova.network.neutron [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.816813] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52329d6e-6cc0-d8ff-fa68-714efe14ef3e, 'name': SearchDatastore_Task, 'duration_secs': 0.010371} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.817088] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.817357] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 809416da-af6c-429d-b4b2-5334768aa744/809416da-af6c-429d-b4b2-5334768aa744.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 797.817624] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-314a4310-258d-4aa6-a940-17efcf2998df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.827527] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 797.827527] env[68638]: value = "task-2833593" [ 797.827527] env[68638]: _type = "Task" [ 797.827527] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.838567] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833593, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.263102] env[68638]: DEBUG nova.scheduler.client.report [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 798.286168] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.341502] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484384} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.341502] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 809416da-af6c-429d-b4b2-5334768aa744/809416da-af6c-429d-b4b2-5334768aa744.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.341728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.342148] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf204e89-818a-4f64-aada-51b7fa65195e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.349732] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 798.349732] env[68638]: value = "task-2833594" [ 798.349732] env[68638]: _type = "Task" [ 798.349732] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.359959] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833594, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.456533] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Received event network-vif-plugged-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 798.456672] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Acquiring lock "333d88b6-2182-4e9c-9430-058e67921828-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.456937] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Lock "333d88b6-2182-4e9c-9430-058e67921828-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.457136] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Lock "333d88b6-2182-4e9c-9430-058e67921828-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.457386] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] No waiting events found dispatching network-vif-plugged-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 798.457548] env[68638]: WARNING nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Received unexpected event network-vif-plugged-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb for instance with vm_state building and task_state spawning. [ 798.457708] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Received event network-changed-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 798.457966] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Refreshing instance network info cache due to event network-changed-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 798.458040] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Acquiring lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.461414] env[68638]: DEBUG nova.network.neutron [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Updating instance_info_cache with network_info: [{"id": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "address": "fa:16:3e:0e:58:b7", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2caa54-8d", "ovs_interfaceid": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.613987] env[68638]: DEBUG nova.network.neutron [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.769064] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.114s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.771766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.672s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.771766] env[68638]: DEBUG nova.objects.instance [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lazy-loading 'resources' on Instance uuid 94a33fcd-69b6-443b-9c86-5129e30b5b0d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.790991] env[68638]: INFO nova.scheduler.client.report [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 
tempest-MultipleCreateTestJSON-988895817-project-member] Deleted allocations for instance 8992f062-c28f-4ac8-8d0d-0c51c3784e88 [ 798.859941] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065925} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.860335] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.861124] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718e3e8a-0a18-40a2-bd77-cbea7b4f7aac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.883361] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 809416da-af6c-429d-b4b2-5334768aa744/809416da-af6c-429d-b4b2-5334768aa744.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.883617] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c524711e-8261-4444-a894-0bad66a4b2a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.905257] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 798.905257] env[68638]: value = "task-2833595" [ 798.905257] env[68638]: _type = "Task" [ 798.905257] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.915124] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833595, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.963949] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.964490] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Instance network_info: |[{"id": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "address": "fa:16:3e:0e:58:b7", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2caa54-8d", "ovs_interfaceid": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 798.964848] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Acquired lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.965081] env[68638]: DEBUG nova.network.neutron [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Refreshing network info cache for port da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 798.966420] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:58:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.974328] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating 
folder: Project (9c7eca7abe024770ac2ffa98a2c8d58b). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 798.977475] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1a498ea-3f56-4488-9bee-db48a70afe38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.990672] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Created folder: Project (9c7eca7abe024770ac2ffa98a2c8d58b) in parent group-v569734. [ 798.990889] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating folder: Instances. Parent ref: group-v569894. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 798.991439] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aee0bb61-2848-48f7-b92d-64a6714816d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.001535] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Created folder: Instances in parent group-v569894. [ 799.001919] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.002026] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.002221] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5578da4c-bed8-4bae-93ea-eb1221e45b76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.022890] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.022890] env[68638]: value = "task-2833598" [ 799.022890] env[68638]: _type = "Task" [ 799.022890] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.035690] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833598, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.120041] env[68638]: INFO nova.compute.manager [-] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Took 1.33 seconds to deallocate network for instance. [ 799.231368] env[68638]: DEBUG nova.network.neutron [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Updated VIF entry in instance network info cache for port da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 799.232032] env[68638]: DEBUG nova.network.neutron [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Updating instance_info_cache with network_info: [{"id": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "address": "fa:16:3e:0e:58:b7", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.76", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda2caa54-8d", "ovs_interfaceid": "da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.302944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-07fdac87-a956-4d4f-902c-cdf0f49fa821 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "8992f062-c28f-4ac8-8d0d-0c51c3784e88" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 48.918s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.414793] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833595, 'name': ReconfigVM_Task, 'duration_secs': 0.307651} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.417186] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 809416da-af6c-429d-b4b2-5334768aa744/809416da-af6c-429d-b4b2-5334768aa744.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.417962] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-394e8365-d8e2-4602-8af1-c556fb693816 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.425609] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 799.425609] env[68638]: value = "task-2833599" [ 799.425609] env[68638]: _type = "Task" [ 799.425609] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.436050] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833599, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.535871] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833598, 'name': CreateVM_Task, 'duration_secs': 0.360784} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.536273] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.537144] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.537451] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.537961] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 799.538365] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c232560-0fb4-4686-b34e-68fec53f78d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.545051] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 799.545051] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520d5c9f-417f-65ab-9246-e36b44798c80" [ 799.545051] env[68638]: _type = "Task" [ 799.545051] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.552956] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520d5c9f-417f-65ab-9246-e36b44798c80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.627890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.734944] env[68638]: DEBUG oslo_concurrency.lockutils [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] Releasing lock "refresh_cache-333d88b6-2182-4e9c-9430-058e67921828" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.735185] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Received event network-vif-deleted-0f78cf8f-95cb-4e44-8401-00f04386577b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 799.735392] env[68638]: INFO nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Neutron deleted interface 0f78cf8f-95cb-4e44-8401-00f04386577b; detaching it from the instance and deleting it from the info cache [ 799.735572] env[68638]: DEBUG nova.network.neutron [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.828452] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccc2be2-4435-42fa-8fd0-891d406a1ca3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.836805] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca41eada-6358-40c8-adb6-f43516a02da5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.866426] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5662c1b7-acf5-418f-a106-b7149e1240ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.874088] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de2903a-3b74-4326-a1be-78aee547f97b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.887960] env[68638]: DEBUG nova.compute.provider_tree [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.935626] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833599, 'name': 
Rename_Task, 'duration_secs': 0.159188} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.935838] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.936099] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4c008e4-7d5d-4a69-8f57-5e111d7f62e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.942920] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 799.942920] env[68638]: value = "task-2833600" [ 799.942920] env[68638]: _type = "Task" [ 799.942920] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.951906] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.055938] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520d5c9f-417f-65ab-9246-e36b44798c80, 'name': SearchDatastore_Task, 'duration_secs': 0.021463} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.056301] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.056544] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.056777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.056918] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.057100] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.057362] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08b416ec-f9f7-4ff7-8faf-31e21aac0d44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.065707] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.065883] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.066625] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89c865e7-ef7c-459d-939f-50404faace38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.071560] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 800.071560] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526e96e3-a662-2e41-f058-71ed62705f1a" [ 800.071560] env[68638]: _type = "Task" [ 800.071560] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.079500] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526e96e3-a662-2e41-f058-71ed62705f1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.241627] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f78ebbd-4bdd-49cb-ab7c-cc8e1e53d1c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.252363] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c184bec-b0af-437f-a2e4-c8a4637512e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.287210] env[68638]: DEBUG nova.compute.manager [req-a85f5555-0fd8-40cd-9bfb-b010b0f0fca6 req-11edefae-a085-44f3-8820-56279d01ee2b service nova] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Detach interface failed, port_id=0f78cf8f-95cb-4e44-8401-00f04386577b, reason: Instance 772af0c0-a8dd-4167-87bc-617a9d95b54d could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 800.392968] env[68638]: DEBUG nova.scheduler.client.report [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.455293] env[68638]: DEBUG oslo_vmware.api [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833600, 'name': PowerOnVM_Task, 'duration_secs': 0.47086} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.455293] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.455293] env[68638]: INFO nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Took 6.59 seconds to spawn the instance on the hypervisor. [ 800.455293] env[68638]: DEBUG nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.455666] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33f0bdf-741e-4794-9ce0-5b3fa0766ce5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.582631] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526e96e3-a662-2e41-f058-71ed62705f1a, 'name': SearchDatastore_Task, 'duration_secs': 0.008957} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.583522] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e79f1023-7992-4a1a-8555-f1fedd9d556d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.589399] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 800.589399] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5299f388-bc37-8a0b-b0ce-3477122249e7" [ 800.589399] env[68638]: _type = "Task" [ 800.589399] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.597201] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5299f388-bc37-8a0b-b0ce-3477122249e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.899532] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.901902] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.787s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.903411] env[68638]: INFO nova.compute.claims [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.922588] env[68638]: INFO nova.scheduler.client.report [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Deleted allocations for instance 94a33fcd-69b6-443b-9c86-5129e30b5b0d [ 800.972799] env[68638]: INFO nova.compute.manager [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Took 50.58 seconds to build instance. [ 801.100512] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5299f388-bc37-8a0b-b0ce-3477122249e7, 'name': SearchDatastore_Task, 'duration_secs': 0.014436} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.100804] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.101087] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 333d88b6-2182-4e9c-9430-058e67921828/333d88b6-2182-4e9c-9430-058e67921828.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.101364] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68a7969b-b8b5-42ba-916f-3d8fdb2e74b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.109133] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 801.109133] env[68638]: value = "task-2833601" [ 801.109133] env[68638]: _type = "Task" [ 801.109133] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.117993] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.431783] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1581744-21e1-4a44-8e1b-819e28345774 tempest-MultipleCreateTestJSON-988895817 tempest-MultipleCreateTestJSON-988895817-project-member] Lock "94a33fcd-69b6-443b-9c86-5129e30b5b0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 51.233s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.475323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7676cd96-4672-4fe8-b29c-d58743e6b113 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 94.299s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.620392] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465665} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.620708] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 333d88b6-2182-4e9c-9430-058e67921828/333d88b6-2182-4e9c-9430-058e67921828.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.620925] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.621217] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-730e504c-bb14-4146-aca5-ae4e0bc8b5f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.629163] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 801.629163] env[68638]: value = "task-2833602" [ 801.629163] env[68638]: _type = "Task" [ 801.629163] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.641276] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.978522] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.107614] env[68638]: DEBUG nova.compute.manager [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 802.107763] env[68638]: DEBUG nova.compute.manager [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing instance network info cache due to event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 802.107945] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] Acquiring lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.109199] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] Acquired lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.109199] env[68638]: DEBUG nova.network.neutron [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.143052] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063468} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.143052] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.143743] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7468cea2-f180-47cb-840d-4d28358a0b4e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.171737] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 333d88b6-2182-4e9c-9430-058e67921828/333d88b6-2182-4e9c-9430-058e67921828.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.174805] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f67abc1-29c3-4312-b88a-cd552665eb22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.198021] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 802.198021] env[68638]: value = "task-2833603" [ 802.198021] env[68638]: _type = "Task" [ 802.198021] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.210299] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833603, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.458147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f82d2-4fdd-4031-b2e9-9a0ca51b65c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.468262] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425769b8-0d84-4277-b423-a5ca0bfa86bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.513856] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fb837c-f9d9-4b11-9f57-0014996d9bd9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.523986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aceb2f9-8301-4165-8b12-a84b59cef5dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.528565] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.540934] env[68638]: DEBUG nova.compute.provider_tree [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.710471] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833603, 'name': ReconfigVM_Task, 'duration_secs': 0.336677} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.710965] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 333d88b6-2182-4e9c-9430-058e67921828/333d88b6-2182-4e9c-9430-058e67921828.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.711928] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18d8572d-9e52-4a37-a831-798fd30b1ce9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.721736] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 802.721736] env[68638]: value = "task-2833604" [ 802.721736] env[68638]: _type = "Task" [ 802.721736] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.735329] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833604, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.046738] env[68638]: DEBUG nova.network.neutron [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updated VIF entry in instance network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.046738] env[68638]: DEBUG nova.network.neutron [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [{"id": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "address": "fa:16:3e:95:a1:3a", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a860c34-61", "ovs_interfaceid": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.046738] env[68638]: DEBUG nova.scheduler.client.report [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.235984] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833604, 'name': Rename_Task, 'duration_secs': 0.151455} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.237796] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.237796] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c548cfa9-b5b3-468c-ae0c-d01894c09230 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.249032] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 803.249032] env[68638]: value = "task-2833605" [ 803.249032] env[68638]: _type = "Task" [ 803.249032] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.259947] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833605, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.553029] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb3fe84b-df1a-41bd-8b4f-2f3370916758 req-2d2a58b7-c87b-42d0-a000-014108d46de8 service nova] Releasing lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.553961] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.554508] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.557124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.966s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.561134] env[68638]: INFO nova.compute.claims [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.759445] env[68638]: DEBUG oslo_vmware.api [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833605, 'name': PowerOnVM_Task, 'duration_secs': 0.473605} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.759793] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.760052] env[68638]: INFO nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 7.22 seconds to spawn the instance on the hypervisor. [ 803.760296] env[68638]: DEBUG nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.761159] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c26b305-16c5-4129-bffc-3cf6d5de3457 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.063796] env[68638]: DEBUG nova.compute.utils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 804.067766] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 804.067766] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.130468] env[68638]: DEBUG nova.policy [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a616000af9d045d7884a7e7cde99ab86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c7eca7abe024770ac2ffa98a2c8d58b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 804.249333] env[68638]: DEBUG nova.compute.manager [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 804.249333] env[68638]: DEBUG nova.compute.manager [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing instance network info cache due to event network-changed-7a860c34-618e-494a-9a17-d5a14acf9fb5. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 804.249333] env[68638]: DEBUG oslo_concurrency.lockutils [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] Acquiring lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.249333] env[68638]: DEBUG oslo_concurrency.lockutils [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] Acquired lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.249333] env[68638]: DEBUG nova.network.neutron [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Refreshing network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 804.284860] env[68638]: INFO nova.compute.manager [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 53.07 seconds to build instance. 
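The Rename_Task and PowerOnVM_Task entries above show the task-polling pattern used throughout this log: the driver submits a vCenter task, then repeatedly polls it ("progress is 5%", "progress is 0%") until it reports success and the duration is logged. The sketch below is a minimal, illustrative version of that loop, assuming a hypothetical `fetch_task_info()` callable; the real driver delegates this to `oslo_vmware.api.VMwareAPISession.wait_for_task`, whose behaviour is only approximated here.

```python
import time


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it finishes.

    `fetch_task_info` is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 5} or
    {'state': 'success', 'duration_secs': 0.47}; it stands in for the real
    oslo.vmware session API, which this sketch does not use.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            # Corresponds to the "... completed successfully." log lines.
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        # Corresponds to the "Task: {...} progress is N%." log lines.
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not finish within {timeout}s")
```

With a stub that returns 'running' a couple of times before 'success', this loop reproduces the progress-then-completion sequence seen for task-2833604 (Rename_Task) and task-2833605 (PowerOnVM_Task) above.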
[ 804.503498] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Successfully created port: 2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.574021] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.788771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-471d4f35-ebaa-4a0a-aa49-e556bbbbcd09 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.516s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.025926] env[68638]: DEBUG nova.network.neutron [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updated VIF entry in instance network info cache for port 7a860c34-618e-494a-9a17-d5a14acf9fb5. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 805.026350] env[68638]: DEBUG nova.network.neutron [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [{"id": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "address": "fa:16:3e:95:a1:3a", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a860c34-61", "ovs_interfaceid": "7a860c34-618e-494a-9a17-d5a14acf9fb5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.128919] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42e001a-214b-4d6b-84bd-92a155dce9ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.137396] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-425cb0eb-b6ff-466f-b91a-5b1d79277fb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.169154] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562d0f3f-2ff9-4eb0-bc27-4fb5f2b1bdc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.177300] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a40a67-63d7-421f-8f91-454e232f3cd4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.190914] env[68638]: DEBUG nova.compute.provider_tree [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.294866] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 805.529120] env[68638]: DEBUG oslo_concurrency.lockutils [req-c783e4e0-318e-4b29-98ce-18f9b6879d4b req-404150e5-4191-425c-ac92-0ee14754debc service nova] Releasing lock "refresh_cache-809416da-af6c-429d-b4b2-5334768aa744" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.585152] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.608956] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.609440] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.609715] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.610664] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.610664] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.610664] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.610664] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.610884] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.610884] 
env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.611010] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.611203] env[68638]: DEBUG nova.virt.hardware [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.612101] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84123c08-2862-4ead-a4e4-e35a0e03c462 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.621961] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ee92ee-ff0d-4461-9a66-3c1e88a1b88f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.693836] env[68638]: DEBUG nova.scheduler.client.report [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.816148] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.094419] env[68638]: DEBUG nova.compute.manager [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Received event network-vif-plugged-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 806.094419] env[68638]: DEBUG oslo_concurrency.lockutils [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] Acquiring lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.094419] env[68638]: DEBUG 
oslo_concurrency.lockutils [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.094578] env[68638]: DEBUG oslo_concurrency.lockutils [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.094624] env[68638]: DEBUG nova.compute.manager [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] No waiting events found dispatching network-vif-plugged-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.094759] env[68638]: WARNING nova.compute.manager [req-35a074c1-292e-4652-ae79-0dd24d556a16 req-a18bf0c7-a591-49ea-ae5c-b56d706c106c service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Received unexpected event network-vif-plugged-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 for instance with vm_state building and task_state spawning. [ 806.187612] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Successfully updated port: 2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.199809] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.200603] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.205039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.627s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.205039] env[68638]: DEBUG nova.objects.instance [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lazy-loading 'resources' on Instance uuid 1946baab-bb48-4138-8db6-1f530e432c3d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 806.415789] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.416045] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.689666] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.689811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.689965] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.708197] env[68638]: DEBUG nova.compute.utils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.709905] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Allocating IP 
information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 806.710628] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 806.773962] env[68638]: DEBUG nova.policy [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '20c12f8fec1e4f339046dcbd4ac0672f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca0573880687407ba35bf95f124b249b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 807.172078] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Successfully created port: 27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.216361] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.221976] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.253077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fefcd8cb-98ad-4b29-b19d-ee41481ef1ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.263174] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9efd85-4438-4b6d-bb5b-b8f77cb1285a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.297812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9367d00b-7025-4dd2-b28d-1c1ed1dfed87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.306124] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47f4d3b-a2b7-4cb1-b3fe-4f640db3b0c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.320824] env[68638]: DEBUG nova.compute.provider_tree [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.397957] env[68638]: DEBUG nova.network.neutron [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Updating instance_info_cache with network_info: [{"id": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "address": "fa:16:3e:f5:bd:da", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1b0e86-ed", "ovs_interfaceid": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.825301] env[68638]: DEBUG nova.scheduler.client.report [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.900406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.900903] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Instance network_info: |[{"id": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "address": "fa:16:3e:f5:bd:da", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1b0e86-ed", "ovs_interfaceid": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.901496] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:bd:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.910021] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.910021] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.910154] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23b1cf94-b10e-46c6-b42d-b03b275b9c13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.935613] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.935613] env[68638]: value = "task-2833606" [ 807.935613] env[68638]: _type = "Task" [ 807.935613] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.945017] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833606, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.194675] env[68638]: DEBUG nova.compute.manager [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Received event network-changed-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 808.195047] env[68638]: DEBUG nova.compute.manager [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Refreshing instance network info cache due to event network-changed-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 808.195299] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] Acquiring lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.195506] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] Acquired lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.195933] env[68638]: DEBUG nova.network.neutron [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Refreshing network info cache for port 2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.226167] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.254222] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.254534] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.254778] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.255030] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.255226] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.255438] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.255671] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.255861] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 808.256083] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.256392] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.256616] env[68638]: DEBUG nova.virt.hardware [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.257565] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89369ea2-ffee-4cae-b1cd-f1f9bed72796 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.266894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcb49c2-291e-4d23-b1b4-d2c231ea17f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.329976] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.334050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.896s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.334050] env[68638]: DEBUG nova.objects.instance [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lazy-loading 'resources' on Instance uuid ac0141c2-aef6-4edf-913a-d4a41b502c10 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.357357] env[68638]: INFO nova.scheduler.client.report [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Deleted allocations for instance 1946baab-bb48-4138-8db6-1f530e432c3d [ 808.445913] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833606, 'name': CreateVM_Task, 'duration_secs': 0.381078} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.446113] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.446803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.446971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.447303] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.447555] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99eb9239-40d4-4f83-b2e8-64d975361a12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.452506] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 808.452506] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52650202-150a-f8fe-a468-feb6e137597c" [ 808.452506] env[68638]: _type = "Task" [ 808.452506] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.460634] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52650202-150a-f8fe-a468-feb6e137597c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.713608] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Successfully updated port: 27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.864979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f7862ccc-3ebb-48ca-b017-ca5dbdac99d9 tempest-ListServerFiltersTestJSON-225846415 tempest-ListServerFiltersTestJSON-225846415-project-member] Lock "1946baab-bb48-4138-8db6-1f530e432c3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.666s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.925998] env[68638]: DEBUG nova.network.neutron [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Updated VIF entry in instance network info cache for port 2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.926412] env[68638]: DEBUG nova.network.neutron [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Updating instance_info_cache with network_info: [{"id": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "address": "fa:16:3e:f5:bd:da", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.204", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1b0e86-ed", "ovs_interfaceid": "2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.965212] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52650202-150a-f8fe-a468-feb6e137597c, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.965521] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.965759] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.965987] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.966146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.966327] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.966610] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e633e9e6-25ef-4abc-b4b5-13706b82bb5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.978638] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.978838] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.981612] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2907d2b-c7d6-471f-91ec-652d087d5e28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.987491] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 808.987491] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a8b950-e59c-caa3-dde5-60c096c5ff3d" [ 808.987491] env[68638]: _type = "Task" [ 808.987491] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.997541] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a8b950-e59c-caa3-dde5-60c096c5ff3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.217524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.217734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquired lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.217890] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.323417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b390314a-4ab8-491f-97a6-5b8bf927781f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.333203] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ad7e8b-30fe-4930-ac09-6d06d8e376d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.366055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374e532b-d042-4d95-83a4-4863d6cfd075 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.374795] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64d74ad-d1ba-4611-9016-f3d24725c59b {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.389499] env[68638]: DEBUG nova.compute.provider_tree [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.429134] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f30a8bf-6387-4ba8-a3ba-b17dfc6c7ee7 req-e02664c8-20f6-400d-8f07-d7093fb974f3 service nova] Releasing lock "refresh_cache-63669b15-2ec8-4a0d-b772-6ef7407e8ebf" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.501878] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a8b950-e59c-caa3-dde5-60c096c5ff3d, 'name': SearchDatastore_Task, 'duration_secs': 0.011548} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.501878] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bece83ff-1abb-40f3-9a22-8f2d14350f55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.508015] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 809.508015] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52892f13-fd9c-eba1-59ba-f06ceeac2d3c" [ 809.508015] env[68638]: _type = "Task" [ 809.508015] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.515551] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52892f13-fd9c-eba1-59ba-f06ceeac2d3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.784781] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.896158] env[68638]: DEBUG nova.scheduler.client.report [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.964658] env[68638]: DEBUG nova.network.neutron [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Updating instance_info_cache with network_info: [{"id": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "address": "fa:16:3e:ac:79:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27864a3f-f3", "ovs_interfaceid": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.020015] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52892f13-fd9c-eba1-59ba-f06ceeac2d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.025335} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.020297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.020560] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 63669b15-2ec8-4a0d-b772-6ef7407e8ebf/63669b15-2ec8-4a0d-b772-6ef7407e8ebf.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.020868] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb6db229-0dde-4a73-9dd1-13afd1ebc6e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.027718] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 810.027718] env[68638]: value = "task-2833607" [ 810.027718] env[68638]: _type = "Task" [ 810.027718] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.035862] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833607, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.279406] env[68638]: DEBUG nova.compute.manager [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Received event network-vif-plugged-27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 810.279654] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Acquiring lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.279873] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.280086] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.280260] env[68638]: DEBUG nova.compute.manager [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] No waiting events found dispatching network-vif-plugged-27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 810.280424] env[68638]: WARNING nova.compute.manager [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Received unexpected event network-vif-plugged-27864a3f-f30d-4d4d-b336-b866e4643d1f for instance with vm_state building and task_state spawning. [ 810.280580] env[68638]: DEBUG nova.compute.manager [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Received event network-changed-27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 810.280729] env[68638]: DEBUG nova.compute.manager [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Refreshing instance network info cache due to event network-changed-27864a3f-f30d-4d4d-b336-b866e4643d1f. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 810.280893] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Acquiring lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.403021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.069s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.406169] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.641s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.407739] env[68638]: INFO nova.compute.claims [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.436718] env[68638]: INFO nova.scheduler.client.report [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Deleted allocations for instance ac0141c2-aef6-4edf-913a-d4a41b502c10 [ 810.468664] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Releasing lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.469121] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Instance network_info: |[{"id": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "address": "fa:16:3e:ac:79:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27864a3f-f3", "ovs_interfaceid": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 810.469780] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Acquired lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.470157] env[68638]: DEBUG nova.network.neutron [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Refreshing network info cache for port 27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 810.471635] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:79:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27864a3f-f30d-4d4d-b336-b866e4643d1f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.481075] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Creating folder: Project (ca0573880687407ba35bf95f124b249b). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.482346] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-798a7dc5-65c8-429b-99f0-f3e69cfc3969 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.495142] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Created folder: Project (ca0573880687407ba35bf95f124b249b) in parent group-v569734. [ 810.495382] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Creating folder: Instances. Parent ref: group-v569898. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.496347] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd3e3c26-c6be-4d76-a98a-edabf9fb577a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.506792] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Created folder: Instances in parent group-v569898. 
[ 810.507048] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.507244] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.507463] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89714f95-2b68-4257-a8e0-c9bf8a6ebb4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.527980] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.527980] env[68638]: value = "task-2833610" [ 810.527980] env[68638]: _type = "Task" [ 810.527980] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.540510] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474913} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.543572] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 63669b15-2ec8-4a0d-b772-6ef7407e8ebf/63669b15-2ec8-4a0d-b772-6ef7407e8ebf.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.543799] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.544021] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833610, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.544221] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4158e692-dcb0-4f2a-b16e-4a60759cde24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.550826] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 810.550826] env[68638]: value = "task-2833611" [ 810.550826] env[68638]: _type = "Task" [ 810.550826] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.558882] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833611, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.953982] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ffa6d06-f8ff-4e27-b48f-79412f4899a9 tempest-ServersV294TestFqdnHostnames-124206622 tempest-ServersV294TestFqdnHostnames-124206622-project-member] Lock "ac0141c2-aef6-4edf-913a-d4a41b502c10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.945s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.040240] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833610, 'name': CreateVM_Task, 'duration_secs': 0.325823} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.040428] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 811.041077] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.041298] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.041644] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 811.041904] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434f3390-d203-46ff-89ba-d15f4fdb140a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.046733] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 811.046733] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5230e683-a35d-48b4-6b0e-58aac62733b1" [ 811.046733] env[68638]: _type = "Task" [ 811.046733] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.056602] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5230e683-a35d-48b4-6b0e-58aac62733b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.063902] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833611, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070915} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.064407] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.065273] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e661c736-79ca-4b53-9138-9ebb285c13cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.091048] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 63669b15-2ec8-4a0d-b772-6ef7407e8ebf/63669b15-2ec8-4a0d-b772-6ef7407e8ebf.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.094013] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a654183e-ae6a-48bc-b42e-59d865067268 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.121520] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 811.121520] env[68638]: value = "task-2833612" [ 811.121520] env[68638]: _type = "Task" [ 811.121520] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.132305] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.232343] env[68638]: DEBUG nova.network.neutron [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Updated VIF entry in instance network info cache for port 27864a3f-f30d-4d4d-b336-b866e4643d1f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.232825] env[68638]: DEBUG nova.network.neutron [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Updating instance_info_cache with network_info: [{"id": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "address": "fa:16:3e:ac:79:00", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.93", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27864a3f-f3", "ovs_interfaceid": "27864a3f-f30d-4d4d-b336-b866e4643d1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.559824] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5230e683-a35d-48b4-6b0e-58aac62733b1, 'name': SearchDatastore_Task, 'duration_secs': 0.011563} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.559824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.559824] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 811.559824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.559824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.560365] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.560365] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fba11d7-bb84-4c16-83fa-be4d4a5d1016 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.571994] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.572284] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 811.573121] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2101b44-727a-443f-8fcf-d021b70f62fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.581232] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 811.581232] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b07090-2f67-4a71-d0e7-cf43cfaea75a" [ 811.581232] env[68638]: _type = "Task" [ 811.581232] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.589434] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b07090-2f67-4a71-d0e7-cf43cfaea75a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.634678] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833612, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.759254] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb1567a0-3069-47a0-80f5-62b637a58b18 req-306aaf65-0b9c-42f3-88e5-4d1439c73d29 service nova] Releasing lock "refresh_cache-96848760-c8a0-43fa-ac7c-e6e56d6d6d83" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.041026] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88524b5-57e5-46a8-847f-dee6f666eacc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.048956] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1130b1b-ea43-44cb-9e87-87885a18ba2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.093599] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a489bed-3257-466b-ae84-126544efaef9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.100155] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b07090-2f67-4a71-d0e7-cf43cfaea75a, 'name': SearchDatastore_Task, 'duration_secs': 0.009286} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.103586] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a6e0bb-e1a5-4a8f-a3d4-de99bb1c0dcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.107427] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba53ccd8-62ad-478c-8a77-dc86c4127799 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.116709] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 812.116709] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52fc2995-9c07-857e-6b05-b11a814b2dc5" [ 812.116709] env[68638]: _type = "Task" [ 812.116709] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.122177] env[68638]: DEBUG nova.compute.provider_tree [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.136168] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833612, 'name': ReconfigVM_Task, 'duration_secs': 0.585161} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.138706] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 63669b15-2ec8-4a0d-b772-6ef7407e8ebf/63669b15-2ec8-4a0d-b772-6ef7407e8ebf.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.139395] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fc2995-9c07-857e-6b05-b11a814b2dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011124} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.140723] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d579591-19ae-4f2c-b38f-5e5d80a19a02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.145450] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.145450] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 96848760-c8a0-43fa-ac7c-e6e56d6d6d83/96848760-c8a0-43fa-ac7c-e6e56d6d6d83.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 812.145450] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e8d605d-0201-4fc9-b7f3-4125c3fff1d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.149306] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 812.149306] env[68638]: value = "task-2833613" [ 812.149306] env[68638]: _type = "Task" [ 812.149306] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.153750] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 812.153750] env[68638]: value = "task-2833614" [ 812.153750] env[68638]: _type = "Task" [ 812.153750] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.160689] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833613, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.166126] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833614, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.629140] env[68638]: DEBUG nova.scheduler.client.report [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 812.661915] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833613, 'name': Rename_Task, 'duration_secs': 0.145516} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.662655] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.662950] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d483035f-b016-494a-ae5e-2be977b08a2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.670892] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833614, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492374} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.673171] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 96848760-c8a0-43fa-ac7c-e6e56d6d6d83/96848760-c8a0-43fa-ac7c-e6e56d6d6d83.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 812.673476] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 812.675121] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-619cb104-1352-41e2-a4cc-c9249cd54629 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.678398] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 812.678398] env[68638]: value = "task-2833615" [ 812.678398] env[68638]: _type = "Task" [ 812.678398] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.684303] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 812.684303] env[68638]: value = "task-2833616" [ 812.684303] env[68638]: _type = "Task" [ 812.684303] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.693308] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833615, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.697242] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833616, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.139858] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.142797] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.144801] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.250s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.145138] env[68638]: DEBUG nova.objects.instance [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lazy-loading 'resources' on Instance uuid a3b06e32-2670-4381-bb91-4597bfcabaa6 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.195688] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833615, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.199948] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069634} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.201043] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 813.202535] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31ee51a-2446-49d5-9329-493efa3868cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.229753] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 96848760-c8a0-43fa-ac7c-e6e56d6d6d83/96848760-c8a0-43fa-ac7c-e6e56d6d6d83.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 813.229753] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c629086a-745f-4ab5-ae12-3bb3b2edffdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.255737] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 813.255737] env[68638]: value = "task-2833617" [ 813.255737] env[68638]: _type = "Task" [ 813.255737] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.267934] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833617, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.648875] env[68638]: DEBUG nova.compute.utils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 813.653754] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 813.654062] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 813.690729] env[68638]: DEBUG oslo_vmware.api [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833615, 'name': PowerOnVM_Task, 'duration_secs': 0.889939} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.693446] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.695603] env[68638]: INFO nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Took 8.11 seconds to spawn the instance on the hypervisor. [ 813.695603] env[68638]: DEBUG nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.696960] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9cd382-1c52-4281-945f-dd2c7bfee801 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.737143] env[68638]: DEBUG nova.policy [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b140aa82f044f108521ab8c0d28c0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3e5757d1f74492481048df4a29032ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 813.770167] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833617, 'name': ReconfigVM_Task, 'duration_secs': 0.340721} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.773586] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 96848760-c8a0-43fa-ac7c-e6e56d6d6d83/96848760-c8a0-43fa-ac7c-e6e56d6d6d83.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.774602] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34e110ac-97cf-4966-9a56-1e4588b02760 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.783909] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 813.783909] env[68638]: value = "task-2833618" [ 813.783909] env[68638]: _type = "Task" [ 813.783909] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.798539] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833618, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.155335] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.190956] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Successfully created port: 7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.224590] env[68638]: INFO nova.compute.manager [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Took 57.13 seconds to build instance. 
[ 814.248407] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a3c327-6f9d-4238-8383-fc638c18527f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.257135] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb41ec1-1e3d-4241-b2ec-d8a9bcfc3059 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.298539] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15349734-0c42-453b-afc1-1134413ef891 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.310061] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67115887-ffef-4c3c-a9ef-564bdb74f64f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.314022] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833618, 'name': Rename_Task, 'duration_secs': 0.282264} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.314299] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.314912] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c2fd08f-6987-4fcb-8a1c-f05dc39ccce2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.324289] env[68638]: DEBUG nova.compute.provider_tree [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.331032] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 814.331032] env[68638]: value = "task-2833619" [ 814.331032] env[68638]: _type = "Task" [ 814.331032] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.350508] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833619, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.727254] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d1a51fd9-1d2b-4c34-8904-886991feaec3 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.086s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.828637] env[68638]: DEBUG nova.scheduler.client.report [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 814.843469] env[68638]: DEBUG oslo_vmware.api [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833619, 'name': PowerOnVM_Task, 'duration_secs': 0.470959} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.844201] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.844527] env[68638]: INFO nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Took 6.62 seconds to spawn the instance on the hypervisor. [ 814.844783] env[68638]: DEBUG nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 814.846028] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30321692-eb33-435c-bf87-29601955dc96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.175635] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 815.203718] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 815.203997] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.204170] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 815.204372] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.204553] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 815.204697] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 815.205249] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 815.205471] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 815.205696] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Got 1 possible 
topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 815.206312] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 815.206478] env[68638]: DEBUG nova.virt.hardware [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 815.207686] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048934a7-ee01-4948-93b9-a6fb7b9e1bbd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.217946] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679a11d0-63e8-4f30-8b5e-f9805bdcc370 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.237543] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 815.339038] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.194s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.342066] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.281s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.342131] env[68638]: DEBUG nova.objects.instance [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lazy-loading 'resources' on Instance uuid 5294e1b6-f34f-4f91-aa3e-e0276ad982ee {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 815.370657] env[68638]: INFO nova.compute.manager [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Took 56.79 seconds to build instance. 
[ 815.375324] env[68638]: INFO nova.scheduler.client.report [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Deleted allocations for instance a3b06e32-2670-4381-bb91-4597bfcabaa6 [ 815.764583] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.872983] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c9ad9122-5241-4f83-90fe-e5aeaa51a1d1 tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.858s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.884710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-16054a7a-1598-48e7-9734-9e7092e6dd5c tempest-TenantUsagesTestJSON-1302532436 tempest-TenantUsagesTestJSON-1302532436-project-member] Lock "a3b06e32-2670-4381-bb91-4597bfcabaa6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.812s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.200175] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Successfully updated port: 7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.218756] env[68638]: DEBUG nova.compute.manager [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Received event network-vif-plugged-7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 816.219180] env[68638]: DEBUG oslo_concurrency.lockutils [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] Acquiring lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.219260] env[68638]: DEBUG oslo_concurrency.lockutils [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.219420] env[68638]: DEBUG oslo_concurrency.lockutils [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.219601] env[68638]: DEBUG nova.compute.manager [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] No waiting events found dispatching network-vif-plugged-7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.219792] env[68638]: WARNING nova.compute.manager [req-369c95ba-6091-4453-b252-79b00be4095d req-2fa40dfb-fa4e-4903-858c-32f82823c00b service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Received unexpected event network-vif-plugged-7ccad3d9-aa85-4881-a1ec-c4e32106fb16 for instance with vm_state building and task_state spawning. [ 816.378894] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 816.414463] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85011e49-4c2a-4d8a-9d0b-026f0cab87ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.421382] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ad99a8-3acf-4a7f-a707-7688b262fbb9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.457074] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f11a7a-04de-4a55-aefe-1079f213bab5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.465687] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440510d4-6482-4f56-bf00-94db403dcff0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.480957] env[68638]: DEBUG nova.compute.provider_tree [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.586854] env[68638]: DEBUG nova.compute.manager [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 816.587702] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301249c5-c44a-48cb-b6f2-29594bc039e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.709242] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.709695] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.709695] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.904097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.987018] env[68638]: DEBUG nova.scheduler.client.report [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 817.104942] env[68638]: INFO nova.compute.manager [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] instance snapshotting [ 817.104942] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.104942] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.104942] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.107559] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.108145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.112902] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d082cae-bd84-4104-8fb0-5da745a8cda7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.118239] env[68638]: INFO nova.compute.manager [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Terminating instance [ 817.144017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b597e087-ff09-40b1-b85f-ee7d8ce8ae53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.260406] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.490213] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.492737] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 50.932s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.523606] env[68638]: INFO nova.scheduler.client.report [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Deleted allocations for instance 5294e1b6-f34f-4f91-aa3e-e0276ad982ee [ 817.541441] env[68638]: DEBUG nova.network.neutron [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Updating instance_info_cache with network_info: [{"id": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "address": "fa:16:3e:65:6f:78", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccad3d9-aa", "ovs_interfaceid": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.624026] env[68638]: DEBUG nova.compute.manager [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.624026] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.624953] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719a9d4d-6def-433b-bb64-f8797986542e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.633596] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.633845] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bcf926a-ac95-487d-b12f-f3ef2d901163 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.642019] env[68638]: DEBUG oslo_vmware.api [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 817.642019] env[68638]: value = "task-2833620" [ 817.642019] env[68638]: _type = "Task" [ 817.642019] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.649193] env[68638]: DEBUG oslo_vmware.api [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.656855] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 817.657632] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-be1bec78-a5ec-44c0-a1c2-cdcb9d44ca79 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.670510] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 817.670510] env[68638]: value = "task-2833621" [ 817.670510] env[68638]: _type = "Task" [ 817.670510] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.679178] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833621, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.997686] env[68638]: INFO nova.compute.claims [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.037178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2020f48b-cc41-4bf4-b4d1-b692e15f8ace tempest-ServerRescueTestJSONUnderV235-50782730 tempest-ServerRescueTestJSONUnderV235-50782730-project-member] Lock "5294e1b6-f34f-4f91-aa3e-e0276ad982ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.994s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.045011] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.045343] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance network_info: |[{"id": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "address": "fa:16:3e:65:6f:78", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccad3d9-aa", "ovs_interfaceid": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.046081] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:6f:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ccad3d9-aa85-4881-a1ec-c4e32106fb16', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.054810] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.056148] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.056443] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8b304a0-9800-4844-8cca-a3f5478bebe2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.080019] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.080019] env[68638]: value = "task-2833622" [ 818.080019] env[68638]: _type = "Task" [ 818.080019] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.088407] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833622, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.151829] env[68638]: DEBUG oslo_vmware.api [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833620, 'name': PowerOffVM_Task, 'duration_secs': 0.192879} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.152239] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.152496] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.152813] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b67299f-ffc7-4208-860e-6f08d4fb9cbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.181188] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833621, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.367887] env[68638]: DEBUG nova.compute.manager [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Received event network-changed-7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 818.368119] env[68638]: DEBUG nova.compute.manager [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Refreshing instance network info cache due to event network-changed-7ccad3d9-aa85-4881-a1ec-c4e32106fb16. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 818.368546] env[68638]: DEBUG oslo_concurrency.lockutils [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] Acquiring lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.368643] env[68638]: DEBUG oslo_concurrency.lockutils [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] Acquired lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.368902] env[68638]: DEBUG nova.network.neutron [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Refreshing network info cache for port 7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.510476] env[68638]: INFO nova.compute.resource_tracker [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating resource usage from migration e1da74ab-012b-46a6-9b56-2cbd2d894fe2 [ 818.591420] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833622, 'name': CreateVM_Task, 'duration_secs': 0.351883} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.591606] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.592318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.593111] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.593111] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 818.595368] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0d48096-392b-4a26-a50b-d4786545ac84 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.600359] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 818.600359] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52907284-2cb6-ae63-63c9-76dbecf2b272" [ 818.600359] env[68638]: _type = "Task" [ 818.600359] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.610304] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52907284-2cb6-ae63-63c9-76dbecf2b272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.679918] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833621, 'name': CreateSnapshot_Task, 'duration_secs': 0.86663} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.681131] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 818.681131] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2b1232-0e68-4e15-94ba-84cafc575b81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.039832] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff4ebbe-cd95-4c7f-aba9-5389f20f5b26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.053250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a87ba44-e08f-4163-abd2-015eb2026112 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.085981] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65124f7-20d0-45a5-a2bf-1f2383af8026 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.093504] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd45a2d4-f0f0-4fb4-a562-5688c65ac38a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.109979] env[68638]: DEBUG nova.compute.provider_tree [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.119838] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52907284-2cb6-ae63-63c9-76dbecf2b272, 'name': SearchDatastore_Task, 'duration_secs': 0.010688} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.124098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.124439] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.124960] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.124960] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.125123] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.125672] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38978ea9-4c56-4cfe-858c-f151420ed96d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.137373] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.137565] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.138345] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dfc1faf-28e6-4588-8f28-52900f9a2221 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.143774] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 819.143774] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526cb0df-07d9-9eb2-9207-befcb1956e4b" [ 819.143774] env[68638]: _type = "Task" [ 819.143774] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.153525] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526cb0df-07d9-9eb2-9207-befcb1956e4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.162771] env[68638]: DEBUG nova.network.neutron [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Updated VIF entry in instance network info cache for port 7ccad3d9-aa85-4881-a1ec-c4e32106fb16. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.163130] env[68638]: DEBUG nova.network.neutron [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Updating instance_info_cache with network_info: [{"id": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "address": "fa:16:3e:65:6f:78", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ccad3d9-aa", "ovs_interfaceid": "7ccad3d9-aa85-4881-a1ec-c4e32106fb16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.208685] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 819.209768] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-27724b1b-29b9-4e82-af5e-1607eec65c03 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.220857] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 819.220857] env[68638]: value = "task-2833624" [ 819.220857] env[68638]: _type = "Task" [ 819.220857] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.233948] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833624, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.332366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "92c90438-f7cc-4a48-bfac-f7912709cf88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.332753] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.346530] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.346794] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.347072] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Deleting the datastore file [datastore1] 96848760-c8a0-43fa-ac7c-e6e56d6d6d83 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.347369] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09845285-d0d9-4fd9-b17e-75f225ee35f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.354629] env[68638]: DEBUG oslo_vmware.api 
[None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for the task: (returnval){ [ 819.354629] env[68638]: value = "task-2833625" [ 819.354629] env[68638]: _type = "Task" [ 819.354629] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.369312] env[68638]: DEBUG oslo_vmware.api [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.620380] env[68638]: DEBUG nova.scheduler.client.report [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.656090] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526cb0df-07d9-9eb2-9207-befcb1956e4b, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.657501] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f04e2d2-ce84-4d5c-a08e-9ced7a69b7a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.662496] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 819.662496] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523cb64d-f140-c358-21c3-bf9b757181aa" [ 819.662496] env[68638]: _type = "Task" [ 819.662496] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.667888] env[68638]: DEBUG oslo_concurrency.lockutils [req-5182d823-f406-45b0-a865-b4c7bebcfdd1 req-459e612b-e5d4-483a-adf1-f9115e1dee02 service nova] Releasing lock "refresh_cache-4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.671320] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523cb64d-f140-c358-21c3-bf9b757181aa, 'name': SearchDatastore_Task} progress is 0%. 
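The CloneVM_Task, DeleteDatastoreFile_Task and SearchDatastore_Task entries above all follow the same oslo_vmware wait_for_task/_poll_task pattern: a vCenter task handle (e.g. task-2833624) is polled until it completes, with its progress logged on every poll. A minimal, self-contained sketch of that polling loop (plain Python, not the oslo.vmware API; `fetch_task_state` and the state names are illustrative assumptions):

```python
import time

def wait_for_task(fetch_task_state, task_id, interval=0.5, timeout=300):
    """Poll a task until it finishes, mirroring the progress logging above.

    fetch_task_state is a caller-supplied callable returning a dict like
    {'state': 'running'|'success'|'error', 'progress': int} -- an assumed
    shape for illustration, not the real vSphere TaskInfo structure.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_state(task_id)
        print("Task %s progress is %d%%" % (task_id, info.get("progress", 0)))
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed" % task_id)
        time.sleep(interval)
    raise TimeoutError("task %s did not complete in %ss" % (task_id, timeout))
```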
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.731604] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833624, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.866877] env[68638]: DEBUG oslo_vmware.api [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Task: {'id': task-2833625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152903} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.871029] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 819.871029] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 819.871029] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.871029] env[68638]: INFO nova.compute.manager [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Took 2.24 seconds to destroy the instance on the hypervisor. [ 819.871029] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 819.871029] env[68638]: DEBUG nova.compute.manager [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 819.871029] env[68638]: DEBUG nova.network.neutron [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 820.128914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.637s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.129194] env[68638]: INFO nova.compute.manager [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Migrating [ 820.140406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.885s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.142905] env[68638]: DEBUG nova.objects.instance [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lazy-loading 'resources' on Instance uuid 2450602a-fde7-4a65-b7a2-be4195077758 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.175337] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523cb64d-f140-c358-21c3-bf9b757181aa, 'name': SearchDatastore_Task, 'duration_secs': 0.011118} completed successfully. 
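The lockutils lines in this stretch record both how long a request waited for a named lock and how long it held it (e.g. "compute_resources" acquired after waiting 48.885s, later released after being held 2.637s). A sketch of that wait/hold accounting around a named lock, using only the standard library (the real code uses oslo_concurrency.lockutils; this only reproduces the pattern, not its API):

```python
import threading
import time
from contextlib import contextmanager

_locks = {}                       # name -> threading.Lock, created on demand
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name):
    """Acquire a named lock and report wait/hold times, in the spirit of the
    'waited N.NNNs' / 'held N.NNNs' lockutils lines above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    try:
        yield
    finally:
        held = time.monotonic() - (t0 + waited)
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))
```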
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.179839] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.183418] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7/4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.186418] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f644da0e-3793-4e61-8a3d-06605feaedb9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.193692] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 820.193692] env[68638]: value = "task-2833626" [ 820.193692] env[68638]: _type = "Task" [ 820.193692] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.203735] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833626, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.234111] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833624, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.436328] env[68638]: DEBUG nova.compute.manager [req-e9dc0610-ac1a-468f-8323-b07160766f69 req-0dec32d2-7069-4bcd-866f-f120c4ebdbdd service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Received event network-vif-deleted-27864a3f-f30d-4d4d-b336-b866e4643d1f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 820.436604] env[68638]: INFO nova.compute.manager [req-e9dc0610-ac1a-468f-8323-b07160766f69 req-0dec32d2-7069-4bcd-866f-f120c4ebdbdd service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Neutron deleted interface 27864a3f-f30d-4d4d-b336-b866e4643d1f; detaching it from the instance and deleting it from the info cache [ 820.436844] env[68638]: DEBUG nova.network.neutron [req-e9dc0610-ac1a-468f-8323-b07160766f69 req-0dec32d2-7069-4bcd-866f-f120c4ebdbdd service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.655924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.656366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.656366] env[68638]: DEBUG nova.network.neutron [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.684441] env[68638]: DEBUG nova.network.neutron [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.704766] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497012} completed successfully. 
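The CopyVirtualDisk_Task above copies the cached image VMDK (devstack-image-cache_base/&lt;image-id&gt;/&lt;image-id&gt;.vmdk) into the instance directory (&lt;instance-uuid&gt;/&lt;instance-uuid&gt;.vmdk) on the same datastore. A small sketch that builds those two datastore paths; the layout is taken directly from the log, while the function names and cache directory default are illustrative assumptions:

```python
def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
    # e.g. "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_dir, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    # e.g. "[datastore1] <instance-uuid>/<instance-uuid>.vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

src = cached_image_path("datastore1", "ef1ae417-fdc1-452d-9e5d-ced4149ebfe9")
dst = instance_disk_path("datastore1", "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7")
print("Copying Virtual Disk %s to %s" % (src, dst))
```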
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.708119] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7/4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.708387] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.709088] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7259c9c2-f9a2-4635-98b8-84c166b6d8c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.715637] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 820.715637] env[68638]: value = "task-2833627" [ 820.715637] env[68638]: _type = "Task" [ 820.715637] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.726042] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.737080] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833624, 'name': CloneVM_Task, 'duration_secs': 1.334673} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.737661] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Created linked-clone VM from snapshot [ 820.738444] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7681ee00-f42e-4af8-aa11-8a73db5da36b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.746318] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Uploading image dea7cdc8-5cef-4f0a-84c2-d56e00de0d61 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 820.768436] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 820.768436] env[68638]: value = "vm-569903" [ 820.768436] env[68638]: _type = "VirtualMachine" [ 820.768436] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 820.768710] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-67daf5af-b255-4b3a-8baf-cc14fae093bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.779640] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease: (returnval){ [ 820.779640] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b71f-e2dc-0959-eb72-be83fd3a75e6" [ 820.779640] env[68638]: _type = "HttpNfcLease" [ 820.779640] env[68638]: } obtained for exporting VM: (result){ [ 820.779640] env[68638]: value = "vm-569903" [ 820.779640] env[68638]: _type = "VirtualMachine" [ 820.779640] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 820.779872] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the lease: (returnval){ [ 820.779872] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b71f-e2dc-0959-eb72-be83fd3a75e6" [ 820.779872] env[68638]: _type = "HttpNfcLease" [ 820.779872] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 820.788295] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 820.788295] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b71f-e2dc-0959-eb72-be83fd3a75e6" [ 820.788295] env[68638]: _type = "HttpNfcLease" [ 820.788295] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 820.940032] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a90a3c54-74d4-4596-86c3-70fd1975e56c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.952323] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a2025f-0881-47ea-88c4-1a84937c4f7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.994048] env[68638]: DEBUG nova.compute.manager [req-e9dc0610-ac1a-468f-8323-b07160766f69 req-0dec32d2-7069-4bcd-866f-f120c4ebdbdd service nova] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Detach interface failed, port_id=27864a3f-f30d-4d4d-b336-b866e4643d1f, reason: Instance 96848760-c8a0-43fa-ac7c-e6e56d6d6d83 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 821.132283] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91a15b1-8750-4bf3-9e23-882dc5b1f383 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.140480] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c731719a-6ab6-44c3-a5d5-5ad847612e03 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.175822] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ca91f7-65e4-4b48-a012-08d97c853fda {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.184986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ddc116-fe58-4d31-883c-9dbfcd4d17bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.188807] env[68638]: INFO nova.compute.manager [-] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Took 1.32 seconds to deallocate network for instance. [ 821.201227] env[68638]: DEBUG nova.compute.provider_tree [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.226425] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176025} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.227353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.228317] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ca0cca-954e-4196-be94-dc22d431ffc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.253554] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7/4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.254428] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b705f457-3526-4697-af2c-b0180bae214e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.277525] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 821.277525] env[68638]: value = "task-2833629" [ 821.277525] env[68638]: _type = "Task" [ 821.277525] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.291146] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 821.291146] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b71f-e2dc-0959-eb72-be83fd3a75e6" [ 821.291146] env[68638]: _type = "HttpNfcLease" [ 821.291146] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 821.294502] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 821.294502] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262b71f-e2dc-0959-eb72-be83fd3a75e6" [ 821.294502] env[68638]: _type = "HttpNfcLease" [ 821.294502] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 821.294771] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833629, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.298312] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0e876a-9bb0-48bf-bb1a-923a9bc8076e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.307611] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 821.307796] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 821.446933] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4a854f63-e0d6-43ec-8edb-f76d44321eff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.485392] env[68638]: DEBUG nova.network.neutron [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.704852] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.705686] env[68638]: DEBUG nova.scheduler.client.report [None 
req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.790077] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833629, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.988026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.210656] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.213695] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.846s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.215910] env[68638]: INFO nova.compute.claims [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.241540] env[68638]: INFO nova.scheduler.client.report [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted allocations for instance 2450602a-fde7-4a65-b7a2-be4195077758 [ 822.289077] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833629, 'name': ReconfigVM_Task} progress is 14%. 
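The inventory payload repeated in these report-client entries (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what the resource tracker pushes to Placement for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff. The schedulable capacity per resource class works out to (total - reserved) * allocation_ratio; a quick worked sketch using the numbers from the log:

```python
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    # capacity Placement can allocate against: (total - reserved) * ratio
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```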
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.750686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f3f2c352-d097-4d8d-9238-619d7af65ae3 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "2450602a-fde7-4a65-b7a2-be4195077758" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.429s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.790474] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833629, 'name': ReconfigVM_Task, 'duration_secs': 1.23813} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.790894] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7/4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.791661] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52688ea1-778f-46d3-85c1-2850cde2c847 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.799377] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 822.799377] env[68638]: value = "task-2833630" [ 822.799377] env[68638]: _type = "Task" [ 822.799377] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.809175] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833630, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.311540] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833630, 'name': Rename_Task, 'duration_secs': 0.274014} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.311861] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.312140] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e27b387-aeb1-4d90-bf82-f04528e314e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.319982] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 823.319982] env[68638]: value = "task-2833631" [ 823.319982] env[68638]: _type = "Task" [ 823.319982] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.334158] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833631, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.503806] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada68fc0-f5be-4646-b786-9d76f01818e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.525501] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 823.830820] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a800de85-9fdf-48db-bc25-0e4953a2671b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.838779] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833631, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.844306] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82989ee-9bc2-4418-815b-aa4490e7778f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.880023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c82c05-d53f-4a96-8e5a-b9e385992897 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.888420] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ded90c1-0ca9-4f35-97d9-1cde08ec0e93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.903697] env[68638]: DEBUG nova.compute.provider_tree [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.032519] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.032854] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5009849f-26f5-4e18-b06b-43435a8be23a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.039902] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 824.039902] env[68638]: value = "task-2833632" [ 824.039902] env[68638]: _type = "Task" [ 824.039902] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.050198] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833632, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.334479] env[68638]: DEBUG oslo_vmware.api [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833631, 'name': PowerOnVM_Task, 'duration_secs': 0.630414} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.334815] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.335032] env[68638]: INFO nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Took 9.16 seconds to spawn the instance on the hypervisor. [ 824.335237] env[68638]: DEBUG nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.337214] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6f1630-79c8-499b-8978-673d1fc66fcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.343033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.343033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.409033] env[68638]: DEBUG nova.scheduler.client.report [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.550227] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833632, 'name': PowerOffVM_Task, 'duration_secs': 0.306727} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.550504] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.550687] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 824.859055] env[68638]: INFO nova.compute.manager [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Took 60.11 seconds to build instance. [ 824.915764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.916357] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 824.919100] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.361s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.919334] env[68638]: DEBUG nova.objects.instance [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lazy-loading 'resources' on Instance uuid 1eee31b7-db8b-4765-8cc2-4273717ef86e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 825.056732] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.057070] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.057162] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.057384] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.057530] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.058231] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.058231] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.058231] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.058231] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.058395] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.058572] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.063794] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb4f2e2e-bc18-40e1-b989-7bd616d79627 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.084704] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 825.084704] env[68638]: value = "task-2833633" [ 825.084704] env[68638]: _type = "Task" [ 825.084704] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.094146] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833633, 'name': ReconfigVM_Task} progress is 6%. 
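The hardware.py entries above enumerate the possible CPU topologies for a 1-vCPU flavor under limits of 65536 sockets/cores/threads and end up with the single candidate VirtCPUTopology(cores=1, sockets=1, threads=1). A compact sketch of that enumeration (it only reproduces the factor-of-vcpus search, not Nova's full preference sorting):

```python
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every (sockets, cores, threads) whose product equals vcpus."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        rest = vcpus // sockets
        for cores in range(1, min(rest, max_cores) + 1):
            if rest % cores:
                continue
            threads = rest // cores
            if threads <= max_threads:
                yield Topology(sockets, cores, threads)

print(list(possible_topologies(1)))   # [Topology(sockets=1, cores=1, threads=1)]
```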
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.361873] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f0b276a8-ce83-4e5a-b692-e2c456a9c8f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.648s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.423515] env[68638]: DEBUG nova.compute.utils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.426261] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 825.426469] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 825.492136] env[68638]: DEBUG nova.policy [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b433e593c2340e49eefa21c93b43f31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d127964153f4854b10dfc8f8eb0009d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 825.598521] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833633, 'name': ReconfigVM_Task, 'duration_secs': 0.395913} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.601810] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 825.867363] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Starting instance... 
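The instance_info_cache update earlier in this section (instance a09c4492-34fd-4010-b547-bfb5b61f252d) shows the full network_info structure Nova caches per instance: a list of VIFs, each with an id, MAC address, OVS interface id, and a nested network/subnets/ips tree. A short sketch that pulls the fixed IPs and OVS interface id out of such a structure; the dict below only reuses fields and values visible in that log entry:

```python
network_info = [{
    "id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce",
    "address": "fa:16:3e:e4:6e:8f",
    "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce",
    "network": {"subnets": [{"cidr": "192.168.233.0/24",
                             "ips": [{"address": "192.168.233.107", "type": "fixed"}]}]},
}]

def fixed_ips(vifs):
    """Collect (vif_id, ovs_interfaceid, address) triples for fixed IPs."""
    out = []
    for vif in vifs:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                if ip.get("type") == "fixed":
                    out.append((vif["id"], vif.get("ovs_interfaceid"), ip["address"]))
    return out

print(fixed_ips(network_info))
```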
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.901861] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Successfully created port: 0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.934112] env[68638]: DEBUG nova.compute.utils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.964879] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c4e1cf-84e9-4308-9165-ba1b29ba10fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.974639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcdf859-099c-4e29-9c8d-aa4e3055fc0d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.011464] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379608d4-7d15-4fa9-8765-b93aab202ffb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.019384] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9fa3785-5dc6-45ca-b4ad-b8c44ed05d88 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.032972] env[68638]: DEBUG nova.compute.provider_tree [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.110146] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:29:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='a203cce6-fe96-4a10-ad18-80d29521d33f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-493947233',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 826.110414] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 826.110581] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 826.110774] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.110929] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 826.111086] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 826.111296] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 826.111561] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 826.111750] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 826.111917] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 826.112125] env[68638]: DEBUG nova.virt.hardware [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 826.117592] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 826.118255] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-ed0ed981-e48e-44f7-ac97-c947e132bd56 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.136839] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611f6b97-f123-430d-a4ef-a6acd1b2dfb0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.145020] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 826.145020] env[68638]: value = "task-2833634" [ 826.145020] env[68638]: _type = "Task" [ 826.145020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.149472] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Suspending the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 826.150097] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d12cdac4-7961-4d3b-a795-ff0ac979a333 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.154443] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833634, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.158565] env[68638]: DEBUG oslo_vmware.api [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 826.158565] env[68638]: value = "task-2833635" [ 826.158565] env[68638]: _type = "Task" [ 826.158565] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.169358] env[68638]: DEBUG oslo_vmware.api [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833635, 'name': SuspendVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.389056] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.437453] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Start building block device mappings for instance. 
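The nova.virt.hardware entries above ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate candidate sockets:cores:threads layouts for the flavor's vCPU count under the default 65536 limits and keep the preferred one. A self-contained sketch of that enumeration, illustrative only and not nova's code:

import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) layouts whose product is vcpus."""
    found = []
    for sockets, cores in itertools.product(range(1, vcpus + 1), repeat=2):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            found.append((sockets, cores, threads))
    return found


print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))  # includes (1, 2, 2), (2, 2, 1), (4, 1, 1), ...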
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 826.536450] env[68638]: DEBUG nova.scheduler.client.report [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.652805] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833634, 'name': ReconfigVM_Task, 'duration_secs': 0.17909} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.653673] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 826.654800] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd40508-fcad-4556-9763-14c0065be65f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.677234] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.680512] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cede1357-d852-4e1b-ac2e-fae2b766e472 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.698987] env[68638]: DEBUG oslo_vmware.api [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833635, 'name': SuspendVM_Task} progress is 62%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.700400] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 826.700400] env[68638]: value = "task-2833636" [ 826.700400] env[68638]: _type = "Task" [ 826.700400] env[68638]: } to complete. 
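The scheduler report client line above skips a placement update because the provider's inventory matches what was already reported. A minimal sketch of that no-change check, using the resource classes and fields shown in the log; the helper and the push callable are hypothetical:

reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def set_inventory(provider_uuid, cached, new_inventory, push):
    """push is a hypothetical callable that would PUT to the placement API."""
    if cached == new_inventory:
        print('Inventory has not changed for provider %s' % provider_uuid)
        return
    push(provider_uuid, new_inventory)


set_inventory('a03d7c1f-9953-43da-98b9-91e5cea1f9ff', reported, dict(reported),
              push=lambda *_: None)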
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.708453] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833636, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.042689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.123s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.045340] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 44.402s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.045541] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.047297] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 827.047297] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.146s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.048431] env[68638]: INFO nova.compute.claims [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.052242] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc01d9f6-7935-4af7-91dc-18a2d6932e46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.061224] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b025f8-4949-4505-aa36-9c7b2850df7a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.076470] env[68638]: INFO nova.scheduler.client.report [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Deleted allocations for instance 1eee31b7-db8b-4765-8cc2-4273717ef86e [ 827.078021] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef660db-8f2f-452c-83e4-75eaf2489c6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.087348] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738d3bcb-285f-46a8-8c99-2253abe78546 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.122160] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178615MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 827.122357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.180199] env[68638]: DEBUG oslo_vmware.api [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833635, 'name': SuspendVM_Task, 'duration_secs': 0.777165} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.180491] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Suspended the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 827.180670] env[68638]: DEBUG nova.compute.manager [None req-41dfeaeb-9459-4231-8218-0f963ede796f tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.181478] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1d36b9-d0c6-4464-8c87-ea35643835c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.209743] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833636, 'name': ReconfigVM_Task, 'duration_secs': 0.313404} completed successfully. 
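The oslo.vmware entries above repeatedly poll vCenter tasks such as ReconfigVM_Task and SuspendVM_Task until they complete, logging intermediate progress along the way. A simplified sketch of that polling loop; get_task_info is a hypothetical stand-in for the session's property-collector read, not the library's real plumbing:

import time


class TaskFailed(Exception):
    pass


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll until the task reports success; raise on error."""
    while True:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 10}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
        time.sleep(poll_interval)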
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.209999] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfigured VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.210289] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 827.450040] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 827.458794] env[68638]: DEBUG nova.compute.manager [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Received event network-vif-plugged-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 827.458794] env[68638]: DEBUG oslo_concurrency.lockutils [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] Acquiring lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.458794] env[68638]: DEBUG oslo_concurrency.lockutils [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.458794] env[68638]: DEBUG oslo_concurrency.lockutils [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.458794] env[68638]: DEBUG nova.compute.manager [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] No waiting events found dispatching network-vif-plugged-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 827.458794] env[68638]: WARNING nova.compute.manager [req-defe2288-f357-4e4a-a142-a41dd6f8e9b1 req-f2df47db-a163-4e5a-8bfd-50d5a3c3897d service nova] [instance: 
27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Received unexpected event network-vif-plugged-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 for instance with vm_state building and task_state spawning. [ 827.475919] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:28:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1429185597',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-667115222',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 827.476189] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.476444] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 827.476852] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.477096] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 827.477238] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 827.478141] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 827.478338] env[68638]: DEBUG nova.virt.hardware [None 
req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 827.478534] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 827.478710] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 827.478887] env[68638]: DEBUG nova.virt.hardware [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 827.480360] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb84f562-d6f3-470b-93e7-e598d0cbd121 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.488764] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1218c89a-592c-4e86-96f7-15d26761ef4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.526384] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Successfully updated port: 0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.588965] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d7a959f-7fac-45f5-b15c-98a4449e1f2c tempest-ServersTestFqdnHostnames-278830155 tempest-ServersTestFqdnHostnames-278830155-project-member] Lock "1eee31b7-db8b-4765-8cc2-4273717ef86e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 56.742s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.716764] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8e459c-7820-4f75-bf51-e206d9956835 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.740519] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ec4143-8ccb-46a0-a961-5617e6d87407 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.758552] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 
a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 828.029475] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.029704] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.029890] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.324160] env[68638]: DEBUG nova.network.neutron [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Port 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 828.578141] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac1c7b0-d113-4e5e-9809-a90231963ac0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.586783] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa339330-2666-4403-90de-d269c64e8c2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.619680] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Instance cache missing network info. 
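Around timestamp 827.45 above, Neutron delivers network-vif-plugged-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 while instance 27ff37a6-de93-4a4b-904f-a91fdb8b0aff is still building, so nova finds no registered waiter and logs the event as unexpected. A minimal sketch of that register-then-wait pattern using threading.Event; this is hypothetical, the real path goes through the compute manager's external event API:

import threading

_waiters = {}  # event name -> threading.Event


def prepare_for_event(name):
    """Register interest before triggering the action that emits the event."""
    _waiters[name] = threading.Event()


def dispatch_event(name):
    waiter = _waiters.get(name)
    if waiter is None:
        # No one registered: mirrors the WARNING about an unexpected event.
        print('WARNING: received unexpected event %s' % name)
        return
    waiter.set()


def wait_for_event(name, timeout=300):
    if not _waiters[name].wait(timeout):
        raise TimeoutError('timed out waiting for %s' % name)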
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.622959] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcde28a-de7b-491c-a455-97fab6173e36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.633352] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9da1230-3160-45ce-912c-44409470c51e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.650242] env[68638]: DEBUG nova.compute.provider_tree [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.921719] env[68638]: DEBUG nova.network.neutron [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updating instance_info_cache with network_info: [{"id": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "address": "fa:16:3e:1b:fc:3f", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0102f455-ad", "ovs_interfaceid": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.155256] env[68638]: DEBUG nova.scheduler.client.report [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.350420] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 
tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.350648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.350824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.424284] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.424687] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Instance network_info: |[{"id": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "address": "fa:16:3e:1b:fc:3f", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0102f455-ad", "ovs_interfaceid": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.425155] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:1b:fc:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffcecdaa-a7b8-49fc-9371-dbdb7744688e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0102f455-ad74-4bf4-a0b8-8a2ec1d59514', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.433552] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.433808] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.434081] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-289f41c3-be2b-4994-b7c5-1cc4365ee7d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.456284] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.456284] env[68638]: value = "task-2833637" [ 829.456284] env[68638]: _type = "Task" [ 829.456284] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.464329] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833637, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.522141] env[68638]: DEBUG nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.523078] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b57075-d745-4082-966b-9e4b8854da5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.661909] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.662472] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Start building networks asynchronously for instance. 
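The vmops line above ("Instance VIF info [...]") shows the Neutron network_info entry for port 0102f455-ad74-4bf4-a0b8-8a2ec1d59514 reduced to the driver's VIF description: bridge name, MAC, an OpaqueNetwork reference keyed by the NSX logical switch id, and the vmxnet3 model. A hedged sketch of that mapping, keeping only fields visible in the log; the helper itself is hypothetical:

def vif_to_vmware_info(vif):
    """Reduce one Neutron network_info entry to the shape logged above."""
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],   # 'br-int' in the log
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }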
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.665584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.613s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.665805] env[68638]: DEBUG nova.objects.instance [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lazy-loading 'resources' on Instance uuid da306fdd-a5b4-4275-a482-f77cc008d780 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 829.684646] env[68638]: DEBUG nova.compute.manager [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Received event network-changed-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 829.684646] env[68638]: DEBUG nova.compute.manager [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Refreshing instance network info cache due to event network-changed-0102f455-ad74-4bf4-a0b8-8a2ec1d59514. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 829.684646] env[68638]: DEBUG oslo_concurrency.lockutils [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] Acquiring lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.684646] env[68638]: DEBUG oslo_concurrency.lockutils [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] Acquired lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.684646] env[68638]: DEBUG nova.network.neutron [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Refreshing network info cache for port 0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.969561] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833637, 'name': CreateVM_Task, 'duration_secs': 0.413088} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.969901] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.970623] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.970787] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.971145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 829.971736] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a11226-3436-4c13-b77a-b288544e67a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.976763] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 829.976763] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a28fad-fbe2-bf74-5e12-e217fc691b26" [ 829.976763] env[68638]: _type = "Task" [ 829.976763] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.985838] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a28fad-fbe2-bf74-5e12-e217fc691b26, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.037618] env[68638]: INFO nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] instance snapshotting [ 830.037899] env[68638]: WARNING nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 830.042799] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b9471c-5f1a-4606-a596-a29d1af16b68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.062529] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb708f4-98e9-4579-97ba-545c7a0556eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.140984] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 830.141927] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95241fa8-8006-4764-9d3c-fe3eddd937e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.148153] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 830.148910] env[68638]: ERROR oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk due to incomplete transfer. [ 830.148910] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-20eb5f0c-f0b8-42bb-97a8-bd0718bbed85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.157771] env[68638]: DEBUG oslo_vmware.rw_handles [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5245149c-2e9e-de96-bc4a-9a48435f4ffb/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 830.157974] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Uploaded image dea7cdc8-5cef-4f0a-84c2-d56e00de0d61 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 830.160258] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 830.160528] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fc9f8736-3d3e-4beb-a1a8-df7b09267748 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.167268] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 830.167268] env[68638]: value = "task-2833638" [ 830.167268] env[68638]: _type = "Task" [ 830.167268] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.168414] env[68638]: DEBUG nova.compute.utils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.170254] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 830.170423] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 830.185034] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833638, 'name': Destroy_Task} progress is 0%. 
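The rw_handles entries above close a VMDK read handle whose transfer did not finish, so the HTTP NFC lease is aborted rather than completed, and only then is the uploaded image finalized and the export VM destroyed. A simplified sketch of that close-path decision; complete_lease and abort_lease are hypothetical callables standing in for HttpNfcLeaseComplete / HttpNfcLeaseAbort:

def close_transfer(bytes_transferred, bytes_expected, complete_lease, abort_lease):
    """Finish or abort the NFC lease depending on how much data moved."""
    if bytes_expected and bytes_transferred < bytes_expected:
        print('Aborting lease due to incomplete transfer '
              '(%d of %d bytes)' % (bytes_transferred, bytes_expected))
        abort_lease()     # HttpNfcLeaseAbort, as invoked in the log above
    else:
        complete_lease()  # HttpNfcLeaseComplete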
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.235823] env[68638]: DEBUG nova.policy [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf59b87f634745d49969858624a7f9b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a20bc501951647abbd0c0d8e075312e2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.422019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.422019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.422019] env[68638]: DEBUG nova.network.neutron [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.467109] env[68638]: DEBUG nova.network.neutron [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updated VIF entry in instance network info cache for port 0102f455-ad74-4bf4-a0b8-8a2ec1d59514. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.467472] env[68638]: DEBUG nova.network.neutron [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updating instance_info_cache with network_info: [{"id": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "address": "fa:16:3e:1b:fc:3f", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0102f455-ad", "ovs_interfaceid": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.489324] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a28fad-fbe2-bf74-5e12-e217fc691b26, 'name': SearchDatastore_Task, 'duration_secs': 0.010225} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.489627] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.489860] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.490224] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.490296] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.490415] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.490676] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4d64cbe-61dc-42e7-938f-7af2cbcd8c0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.499315] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.499523] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Folder [datastore1] devstack-image-cache_base created. 
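The ds_util entries above take locks on the image-cache path and then create [datastore1] devstack-image-cache_base, treating the directory as shared state between concurrent spawns. A minimal sketch of that guarded, idempotent mkdir; the FileAlreadyExists handling is an assumption about how a duplicate folder would be tolerated, not quoted from nova:

from oslo_concurrency import lockutils


class FileAlreadyExists(Exception):
    """Stand-in for the driver's duplicate-folder error (assumed name)."""


def ensure_cache_folder(path, make_directory):
    """make_directory is a hypothetical wrapper around FileManager.MakeDirectory."""
    with lockutils.lock(path):
        try:
            make_directory(path)
            print('Created directory with path %s' % path)
        except FileAlreadyExists:
            print('Folder %s already exists' % path)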
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.501084] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-185e2e23-32ec-47ba-b938-28b44cc7b021 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.507069] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 830.507069] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52654f60-dd95-6205-84d2-75c4138838bd" [ 830.507069] env[68638]: _type = "Task" [ 830.507069] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.515284] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52654f60-dd95-6205-84d2-75c4138838bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.577670] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 830.578417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5d262898-644c-45d5-810f-b19360d55119 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.587755] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 830.587755] env[68638]: value = "task-2833639" [ 830.587755] env[68638]: _type = "Task" [ 830.587755] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.598252] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833639, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.611580] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Successfully created port: 00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.677112] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.700965] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833638, 'name': Destroy_Task, 'duration_secs': 0.307854} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.703840] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Destroyed the VM [ 830.704502] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 830.704574] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-92209ba3-91c3-4e7b-9550-2ce329d79db3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.714424] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 830.714424] env[68638]: value = "task-2833640" [ 830.714424] env[68638]: _type = "Task" [ 830.714424] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.720133] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833640, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.722355] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e78c50-5a5c-4aca-9631-3d505a3c8d9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.730221] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2533d6a6-c862-4e47-8efb-2256d2ed9632 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.764444] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50e7ffa-3554-4b54-9ad8-b970d3ee2efc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.771641] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfebda3-57a0-4711-bf76-493a4daf2dac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.787973] env[68638]: DEBUG nova.compute.provider_tree [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.970348] env[68638]: DEBUG oslo_concurrency.lockutils [req-aea78d10-7403-4619-a362-c076c2790b69 req-4be4d785-47d8-4f1e-9cf7-04f1d86335bd service nova] Releasing lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.996935] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Successfully created port: ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.018150] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52654f60-dd95-6205-84d2-75c4138838bd, 'name': SearchDatastore_Task, 'duration_secs': 0.008465} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.020078] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc25413f-597d-44e1-9775-7f5cedf9b21f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.025404] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 831.025404] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52031d16-4129-e749-1ecf-f1a878f00ffa" [ 831.025404] env[68638]: _type = "Task" [ 831.025404] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.033567] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52031d16-4129-e749-1ecf-f1a878f00ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.104682] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833639, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.223875] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833640, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.282843] env[68638]: DEBUG nova.network.neutron [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.291339] env[68638]: DEBUG nova.scheduler.client.report [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.536185] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52031d16-4129-e749-1ecf-f1a878f00ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.009152} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.536445] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.537368] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/27ff37a6-de93-4a4b-904f-a91fdb8b0aff.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.537368] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c44ef78-9cfa-40ba-a7b8-cc8a28400996 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.543014] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 831.543014] env[68638]: value = "task-2833641" [ 831.543014] env[68638]: _type = "Task" [ 831.543014] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.551350] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833641, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.595033] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833639, 'name': CreateSnapshot_Task, 'duration_secs': 0.668429} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.595344] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 831.596115] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9d9df4-58ee-4b49-a3b8-a1cd8785e81a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.689787] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.718824] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.719129] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.719291] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.719473] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.719618] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.719765] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 
tempest-ServersTestMultiNic-1356929959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.719976] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.720219] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.720396] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.720557] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.720730] env[68638]: DEBUG nova.virt.hardware [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.722023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d34f89-443a-4b42-9e30-0ec705d87046 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.734459] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d387f0e9-16a1-44a8-8a14-7ab5c468c97d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.738696] env[68638]: DEBUG oslo_vmware.api [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833640, 'name': RemoveSnapshot_Task, 'duration_secs': 0.928739} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.738997] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 831.739371] env[68638]: INFO nova.compute.manager [None req-4b8cf963-f886-4cb2-96be-1f30dd905ac1 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 14.63 seconds to snapshot the instance on the hypervisor. [ 831.786704] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.796717] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.131s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.799553] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.342s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.801223] env[68638]: INFO nova.compute.claims [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.834546] env[68638]: INFO nova.scheduler.client.report [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Deleted allocations for instance da306fdd-a5b4-4275-a482-f77cc008d780 [ 832.059168] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833641, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469873} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.059450] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/27ff37a6-de93-4a4b-904f-a91fdb8b0aff.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.059674] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.059992] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e3b1349-44f9-4a78-83e4-c16c2a5cd287 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.066612] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 832.066612] env[68638]: value = "task-2833642" [ 832.066612] env[68638]: _type = "Task" [ 832.066612] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.076227] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833642, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.116044] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 832.116387] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-865089d0-a2e7-47e6-89c9-7284555ccd44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.127659] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 832.127659] env[68638]: value = "task-2833643" [ 832.127659] env[68638]: _type = "Task" [ 832.127659] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.138237] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.312582] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e384de87-c8de-4e79-ae47-23e19691cb7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.334700] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8a2eb6-de4a-4def-ac94-5c665d836d99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.343851] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 832.347691] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42304ab9-7812-4d8a-99a3-66686b393093 tempest-ServerGroupTestJSON-1115903510 tempest-ServerGroupTestJSON-1115903510-project-member] Lock "da306fdd-a5b4-4275-a482-f77cc008d780" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.921s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.580513] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833642, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.20251} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.580743] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.581558] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4ca77f-902f-4da1-b8c4-f5536ad53fdc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.607132] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/27ff37a6-de93-4a4b-904f-a91fdb8b0aff.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.607132] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-881d53e9-34ae-4559-898d-a11708d029ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.625917] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 832.625917] env[68638]: value = "task-2833644" [ 832.625917] env[68638]: _type = "Task" [ 832.625917] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.634670] env[68638]: DEBUG nova.compute.manager [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-vif-plugged-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 832.634918] env[68638]: DEBUG oslo_concurrency.lockutils [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] Acquiring lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.635090] env[68638]: DEBUG oslo_concurrency.lockutils [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.637299] env[68638]: DEBUG oslo_concurrency.lockutils [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.637299] env[68638]: DEBUG nova.compute.manager [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] No waiting events found dispatching network-vif-plugged-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.637299] env[68638]: WARNING nova.compute.manager [req-f0e9ce22-bf24-4b2a-9d99-5d58216f4c0c req-37cedf18-ed0a-4a4c-85ec-b59cce03f044 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received unexpected event network-vif-plugged-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc for instance with vm_state building and task_state spawning. [ 832.642294] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task} progress is 93%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.642547] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833644, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.737742] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Successfully updated port: 00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.854020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.854436] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c2eaab1-b36f-4254-8ac9-b3195c5ba216 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.863865] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 832.863865] env[68638]: value = "task-2833645" [ 832.863865] env[68638]: _type = "Task" [ 832.863865] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.875276] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.147480] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833644, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.151013] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task} progress is 93%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.350664] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217e1a4d-6d3a-4ded-834e-e2137a617826 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.360649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ff0376-541e-4dc9-acbb-e9eb832e70f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.393451] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989ac9a6-da22-4216-8e68-fad9525e06cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.399283] env[68638]: DEBUG oslo_vmware.api [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833645, 'name': PowerOnVM_Task, 'duration_secs': 0.373793} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.399933] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.400138] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c661acf9-2c73-498b-b27f-6295ed3c8808 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance 'a09c4492-34fd-4010-b547-bfb5b61f252d' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.406907] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5423e7f3-1bfd-4a5d-9cd0-2a0b0d3e7a1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.425704] env[68638]: DEBUG nova.compute.provider_tree [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.636764] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833644, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.646034] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.935453] env[68638]: DEBUG nova.scheduler.client.report [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.048564] env[68638]: DEBUG nova.compute.manager [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.049397] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ccbd29-6a8c-41fc-84ac-78bf692c76ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.137353] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833644, 'name': ReconfigVM_Task, 'duration_secs': 1.100371} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.142911] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/27ff37a6-de93-4a4b-904f-a91fdb8b0aff.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.143117] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68638) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 834.143814] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-cd01cfe6-e8c6-4f11-85eb-16b96f4ab988 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.152893] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.154744] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 834.154744] env[68638]: value = "task-2833646" [ 834.154744] env[68638]: _type = "Task" [ 834.154744] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.166290] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833646, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.441738] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.642s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.442377] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 834.445206] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.095s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.447712] env[68638]: INFO nova.compute.claims [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.546155] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "9ba0f737-7947-409c-9163-79d621a29285" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.548063] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.563449] env[68638]: INFO nova.compute.manager [None 
req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] instance snapshotting [ 834.567141] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3469b0-adcc-4919-bfc2-450d8d254bf1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.589324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265efaf9-759e-469a-9d9c-711463a795d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.648134] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833643, 'name': CloneVM_Task, 'duration_secs': 2.058879} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.648423] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Created linked-clone VM from snapshot [ 834.649224] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a955fbc-9849-4ae7-83e1-a5ab0488feb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.657658] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Uploading image 380d98bc-ca07-48a4-9708-7df38f3a8d75 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 834.668706] env[68638]: DEBUG nova.compute.manager [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-changed-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 834.668899] env[68638]: DEBUG nova.compute.manager [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Refreshing instance network info cache due to event network-changed-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 834.669133] env[68638]: DEBUG oslo_concurrency.lockutils [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] Acquiring lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.669279] env[68638]: DEBUG oslo_concurrency.lockutils [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] Acquired lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.669535] env[68638]: DEBUG nova.network.neutron [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Refreshing network info cache for port 00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.678272] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833646, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.055623} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.679925] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68638) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 834.679925] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39afa0a-6a92-4193-8d83-fd6ef2fcc81c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.715228] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/ephemeral_0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.717345] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 834.717345] env[68638]: value = "vm-569906" [ 834.717345] env[68638]: _type = "VirtualMachine" [ 834.717345] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 834.718870] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e853a340-480b-466b-ab0a-575687fc230f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.731783] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b9252f8d-bb88-4361-891b-73cfb750fac7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.741233] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 834.741233] env[68638]: value = "task-2833647" [ 834.741233] env[68638]: _type = "Task" [ 834.741233] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.742645] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease: (returnval){ [ 834.742645] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f6708-c4e4-20ed-7e62-437a2abfc225" [ 834.742645] env[68638]: _type = "HttpNfcLease" [ 834.742645] env[68638]: } obtained for exporting VM: (result){ [ 834.742645] env[68638]: value = "vm-569906" [ 834.742645] env[68638]: _type = "VirtualMachine" [ 834.742645] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 834.743085] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the lease: (returnval){ [ 834.743085] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f6708-c4e4-20ed-7e62-437a2abfc225" [ 834.743085] env[68638]: _type = "HttpNfcLease" [ 834.743085] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 834.757032] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833647, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.758531] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 834.758531] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f6708-c4e4-20ed-7e62-437a2abfc225" [ 834.758531] env[68638]: _type = "HttpNfcLease" [ 834.758531] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 834.865633] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Successfully updated port: ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.953555] env[68638]: DEBUG nova.compute.utils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 834.957847] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 834.957847] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.015288] env[68638]: DEBUG nova.policy [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'adf3da1f81694585b727a7b0528dfeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46bace7ece424608bf9f88293ba6364c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.099872] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 835.100240] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-97408776-c51b-4e66-8cd3-482ab08577cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.110660] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 835.110660] env[68638]: value = "task-2833649" [ 835.110660] env[68638]: _type = "Task" [ 835.110660] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.120692] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833649, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.228481] env[68638]: DEBUG nova.network.neutron [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.261747] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 835.261747] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f6708-c4e4-20ed-7e62-437a2abfc225" [ 835.261747] env[68638]: _type = "HttpNfcLease" [ 835.261747] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 835.261747] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833647, 'name': ReconfigVM_Task, 'duration_secs': 0.301625} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.261747] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 835.261747] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f6708-c4e4-20ed-7e62-437a2abfc225" [ 835.261747] env[68638]: _type = "HttpNfcLease" [ 835.261747] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 835.261747] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff/ephemeral_0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.262770] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e8d473-5313-4255-a08f-178811dd6398 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.266897] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58b39079-e6d2-45cc-a472-d72fc4e4f928 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.275510] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk from lease info. 
{{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 835.275715] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 835.278457] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 835.278457] env[68638]: value = "task-2833650" [ 835.278457] env[68638]: _type = "Task" [ 835.278457] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.346523] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833650, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.351023] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Successfully created port: 6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.369835] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.383612] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-adb07f4b-2669-45a6-a869-6832cffcfe7f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.458969] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.621795] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833649, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.713417] env[68638]: DEBUG nova.network.neutron [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.793131] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833650, 'name': Rename_Task, 'duration_secs': 0.200303} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.797184] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.797812] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c993d90-404b-4b70-b295-b5365a55dba9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.806572] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 835.806572] env[68638]: value = "task-2833651" [ 835.806572] env[68638]: _type = "Task" [ 835.806572] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.818571] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833651, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.083151] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df1ff085-6628-492c-9401-df49b932ea3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.092821] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8331825d-44d9-4c5e-bf35-121078468815 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.136257] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b3eeef-9923-429d-a962-60075526b6c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.146309] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833649, 'name': CreateSnapshot_Task, 'duration_secs': 0.587321} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.147480] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 836.148069] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6009e86-e5cf-4de5-bec8-ae1e209740aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.152984] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b523830-9a0e-4a8e-ae72-8cd5fd544d27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.174459] env[68638]: DEBUG nova.compute.provider_tree [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.218801] env[68638]: DEBUG oslo_concurrency.lockutils [req-7665b6b0-c36d-4dfc-8d81-76bd9dee1470 req-bc0c3962-cec4-4a12-88f7-87434c289325 service nova] Releasing lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.219247] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.219491] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.321702] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833651, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.474166] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 836.502321] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None 
req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.504746] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.505378] env[68638]: DEBUG nova.virt.hardware [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.505793] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd88e3bd-6dcc-4a90-ad44-257b97b78c35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.515590] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3f2ada-a448-4cfc-9fec-051c11d39f6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.644069] env[68638]: DEBUG nova.network.neutron [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Port 0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 836.644375] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.644569] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.644706] env[68638]: DEBUG nova.network.neutron [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.673648] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 836.674332] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with 
opID=oslo.vmware-7937b6cd-50c3-40e8-af67-f3862a0672bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.678292] env[68638]: DEBUG nova.scheduler.client.report [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.690365] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 836.690365] env[68638]: value = "task-2833652" [ 836.690365] env[68638]: _type = "Task" [ 836.690365] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.694087] env[68638]: DEBUG nova.compute.manager [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-vif-plugged-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 836.694432] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Acquiring lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.694763] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.694977] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.695210] env[68638]: DEBUG nova.compute.manager [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] No waiting events found dispatching network-vif-plugged-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 836.695424] env[68638]: WARNING nova.compute.manager [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received 
unexpected event network-vif-plugged-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b for instance with vm_state building and task_state spawning. [ 836.695788] env[68638]: DEBUG nova.compute.manager [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-changed-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 836.696043] env[68638]: DEBUG nova.compute.manager [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Refreshing instance network info cache due to event network-changed-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 836.696279] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Acquiring lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.710806] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.761896] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.823568] env[68638]: DEBUG oslo_vmware.api [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833651, 'name': PowerOnVM_Task, 'duration_secs': 0.80202} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.823907] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.824177] env[68638]: INFO nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Took 9.37 seconds to spawn the instance on the hypervisor. 
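
The PowerOnVM_Task entries above follow the usual oslo.vmware pattern recorded throughout this log: invoke a vCenter *_Task method, then poll the returned task until it completes. A minimal sketch of that pattern, assuming placeholder vCenter credentials and reusing the vm-569906 moref seen in the export-lease entries earlier in this section:

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder host/credentials; task_poll_interval drives the periodic
# "_poll_task ... progress is N%" entries seen in the log.
session = api.VMwareAPISession(
    'vc1.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Build a managed object reference for an existing VM (moref id taken from
# the lease entries above; purely illustrative).
vm_ref = vim_util.get_moref('vm-569906', 'VirtualMachine')

# The *_Task call returns a Task moref immediately; wait_for_task() polls it
# ("Waiting for the task ... to complete") and raises if the task errors out.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)
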
[ 836.824402] env[68638]: DEBUG nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.825348] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2622c78-0329-4f7a-be85-b99609b0c7c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.131512] env[68638]: DEBUG nova.network.neutron [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Updating instance_info_cache with network_info: [{"id": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "address": "fa:16:3e:f8:26:4e", "network": {"id": "65ad00bc-351b-475f-b623-5d9a5cd72a77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1277410382", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c4f801-ad", "ovs_interfaceid": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "address": "fa:16:3e:93:9f:f4", "network": {"id": "f13d4c6c-df33-47b8-a0a1-66edeb848072", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-192602891", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceee0c90-ed", "ovs_interfaceid": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.183739] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 
tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.184193] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.187537] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.433s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.187765] env[68638]: DEBUG nova.objects.instance [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lazy-loading 'resources' on Instance uuid be761cf1-0949-42c0-8a38-58af33113a03 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.203655] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.229339] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Successfully updated port: 6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.351970] env[68638]: INFO nova.compute.manager [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Took 63.00 seconds to build instance. 
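
Earlier in this section the ImagesTestJSON snapshot upload goes through an HttpNfcLease: ExportVm returns a lease that starts out "initializing", wait_for_lease_ready polls it, and the lease info then yields the disk-0.vmdk NFC URL that is opened for reading. A rough sketch of that flow with oslo.vmware, assuming a session object like the one sketched above:

from oslo_vmware import vim_util

def export_first_vmdk_url(session, vm_moid):
    """Return the first device URL offered by an export lease (illustrative)."""
    vm_ref = vim_util.get_moref(vm_moid, 'VirtualMachine')

    # ExportVm returns an HttpNfcLease moref; the lease begins "initializing".
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Poll the lease state until vCenter reports it ready
    # (the wait_for_lease_ready / "_poll_lease" entries above).
    session.wait_for_lease_ready(lease)

    # Read the lease info and pick the NFC URL of the first disk,
    # e.g. https://<esx-host>/nfc/<ticket>/disk-0.vmdk as logged above.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    return lease_info.deviceUrl[0].url
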
[ 837.432891] env[68638]: DEBUG nova.network.neutron [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.641022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.641022] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance network_info: |[{"id": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "address": "fa:16:3e:f8:26:4e", "network": {"id": "65ad00bc-351b-475f-b623-5d9a5cd72a77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1277410382", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c4f801-ad", "ovs_interfaceid": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "address": "fa:16:3e:93:9f:f4", "network": {"id": "f13d4c6c-df33-47b8-a0a1-66edeb848072", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-192602891", 
"subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceee0c90-ed", "ovs_interfaceid": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 837.641022] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Acquired lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.641022] env[68638]: DEBUG nova.network.neutron [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Refreshing network info cache for port ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.641022] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:26:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92233552-2c0c-416e-9bf3-bfcca8eda2dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00c4f801-ad2b-4bfa-b69e-338c7f8e36bc', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:9f:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.651436] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.655571] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.656322] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6f15eb0-17f1-414f-a2cd-b4211d3dae05 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.683169] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.683169] env[68638]: value = "task-2833653" [ 837.683169] env[68638]: _type = "Task" [ 837.683169] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.690844] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.696024] env[68638]: DEBUG nova.compute.utils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 837.698852] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.699214] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.712018] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.732023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.732023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.732023] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.785135] env[68638]: DEBUG nova.policy [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5053ed9baa1a4a7094bb540d135829bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd4ee3bf60f3a4d8f99f2fa20b6f13792', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.855804] env[68638]: DEBUG oslo_concurrency.lockutils [None req-afad8543-5c65-4b6c-a54d-13b7bd8f3c4a tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.525s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.937567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.021290] env[68638]: DEBUG nova.network.neutron [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Updated VIF entry in instance network info cache for port ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 838.021290] env[68638]: DEBUG nova.network.neutron [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Updating instance_info_cache with network_info: [{"id": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "address": "fa:16:3e:f8:26:4e", "network": {"id": "65ad00bc-351b-475f-b623-5d9a5cd72a77", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1277410382", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00c4f801-ad", "ovs_interfaceid": "00c4f801-ad2b-4bfa-b69e-338c7f8e36bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "address": "fa:16:3e:93:9f:f4", "network": {"id": "f13d4c6c-df33-47b8-a0a1-66edeb848072", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-192602891", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.140", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a20bc501951647abbd0c0d8e075312e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapceee0c90-ed", "ovs_interfaceid": "ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.196071] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.200697] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.217241] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.282113] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74923bfe-2417-43fe-a6b6-e7b2fdca5d57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.291957] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a301dec-882c-4624-90c9-29f82aee5895 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.294736] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.331324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca541023-e83f-4de3-a3ab-f1f5fe4c84df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.345204] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acfd4f1-ea6c-4fcc-8631-0d3cb6b381b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.364375] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.370454] env[68638]: DEBUG nova.compute.provider_tree [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.444349] env[68638]: DEBUG nova.compute.manager [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68638) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 838.444588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.477408] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Successfully created port: c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.524038] env[68638]: DEBUG oslo_concurrency.lockutils [req-c4468e35-c3b5-4eb6-87be-5ce72e79693f req-f1966348-2779-4ef6-aae7-93967c090700 service nova] Releasing lock "refresh_cache-a98f0c63-d327-47b9-b0c2-f7790f1ae87d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.560625] env[68638]: DEBUG nova.network.neutron [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Updating instance_info_cache with network_info: [{"id": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "address": "fa:16:3e:56:fb:a6", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9df10a-d8", "ovs_interfaceid": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 838.696448] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.708471] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.711084] env[68638]: INFO nova.virt.block_device [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Booting with volume 4dc574c0-0283-4f21-ac01-f714b10306da at /dev/sda [ 838.768152] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9dbfa6c1-f8de-4b9a-89a7-64a36ae40a30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.777298] env[68638]: DEBUG nova.compute.manager [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Received event network-vif-plugged-6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 838.777298] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Acquiring lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.777298] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.777544] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.778049] env[68638]: DEBUG nova.compute.manager [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] No waiting events found dispatching network-vif-plugged-6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 838.778049] env[68638]: WARNING nova.compute.manager [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Received unexpected event network-vif-plugged-6f9df10a-d887-489b-b7e7-a3305f8c5c9e for instance with vm_state building and task_state spawning. 
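
The neutron event entries above also show the locking discipline around them: the handler takes an "<instance-uuid>-events" lock while popping waiters and a "refresh_cache-<instance-uuid>" lock while rebuilding the network info cache, both via oslo.concurrency. A small sketch of that locking pattern, with the instance UUID taken from the log and the cache-refresh callable left as a placeholder:

from oslo_concurrency import lockutils

instance_uuid = 'ee752ace-fa19-4fd7-af89-f6628ce3d087'

def handle_network_changed_event(refresh_cache):
    # In-process locks; acquisition and release are what lockutils.py:405/424
    # log as 'Acquiring lock ...' and 'Lock ... "released"'.
    with lockutils.lock('%s-events' % instance_uuid):
        # pop any waiter registered for this event, if one exists
        pass
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_cache()
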
[ 838.778192] env[68638]: DEBUG nova.compute.manager [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Received event network-changed-6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 838.778384] env[68638]: DEBUG nova.compute.manager [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Refreshing instance network info cache due to event network-changed-6f9df10a-d887-489b-b7e7-a3305f8c5c9e. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 838.778724] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Acquiring lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.783639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88202119-d6d7-4b3d-ba49-1eb923911aec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.801140] env[68638]: DEBUG nova.compute.manager [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Received event network-changed-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 838.801612] env[68638]: DEBUG nova.compute.manager [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Refreshing instance network info cache due to event network-changed-0102f455-ad74-4bf4-a0b8-8a2ec1d59514. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 838.801663] env[68638]: DEBUG oslo_concurrency.lockutils [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] Acquiring lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.801843] env[68638]: DEBUG oslo_concurrency.lockutils [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] Acquired lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.802079] env[68638]: DEBUG nova.network.neutron [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Refreshing network info cache for port 0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.840307] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d41a97b3-6f70-4ea7-b742-8f12fc29a8fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.852012] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a484a12-d5f7-4d3c-ab8a-1301936588f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.874853] env[68638]: DEBUG nova.scheduler.client.report [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.899148] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae682f8e-6b56-429c-b0ee-b3260da97b70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.907218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7c186f-6c18-4c4b-87f7-9cc89a37f26c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.912703] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.924868] env[68638]: DEBUG nova.virt.block_device [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 
tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updating existing volume attachment record: a5452859-314c-4413-98ae-941e73f8b33e {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 839.063980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.064338] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Instance network_info: |[{"id": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "address": "fa:16:3e:56:fb:a6", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9df10a-d8", "ovs_interfaceid": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 839.064701] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Acquired lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.064905] env[68638]: DEBUG nova.network.neutron [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Refreshing network info cache for port 6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 839.066190] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:fb:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b36c5ae6-c344-4bd1-8239-29128e2bbfbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f9df10a-d887-489b-b7e7-a3305f8c5c9e', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.076747] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating folder: Project (46bace7ece424608bf9f88293ba6364c). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.078049] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-341e40df-17a1-42ea-b568-5e5c5a01b951 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.092513] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created folder: Project (46bace7ece424608bf9f88293ba6364c) in parent group-v569734. [ 839.092717] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating folder: Instances. Parent ref: group-v569910. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.092983] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f3d24c4-8917-4d3b-9d1f-a2406f3b1858 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.107770] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created folder: Instances in parent group-v569910. [ 839.108067] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.108342] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.108575] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92da0156-1b92-4b69-8b96-e76f5e53d63f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.138842] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.138842] env[68638]: value = "task-2833656" [ 839.138842] env[68638]: _type = "Task" [ 839.138842] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.148768] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833656, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.200307] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.215818] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.381954] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.194s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.385709] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.285s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.386210] env[68638]: DEBUG nova.objects.instance [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'resources' on Instance uuid 14772ba8-bde2-42ef-9a37-df876c8af321 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 839.406849] env[68638]: INFO nova.scheduler.client.report [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Deleted allocations for instance be761cf1-0949-42c0-8a38-58af33113a03 [ 839.658626] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833656, 'name': CreateVM_Task, 'duration_secs': 0.399636} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.663162] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.664953] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.665227] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.666030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 839.666260] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc41fe3a-6a66-407a-b2a4-b27bf680ff40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.674998] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 839.674998] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5242f777-f17d-2f43-f5d6-a91d7028c7c0" [ 839.674998] env[68638]: _type = "Task" [ 839.674998] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.686821] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5242f777-f17d-2f43-f5d6-a91d7028c7c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.698593] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.711440] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.835314] env[68638]: DEBUG nova.network.neutron [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updated VIF entry in instance network info cache for port 0102f455-ad74-4bf4-a0b8-8a2ec1d59514. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.835714] env[68638]: DEBUG nova.network.neutron [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updating instance_info_cache with network_info: [{"id": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "address": "fa:16:3e:1b:fc:3f", "network": {"id": "d9d5f06a-7963-44e3-8a0a-c839c3624857", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-336879211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d127964153f4854b10dfc8f8eb0009d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0102f455-ad", "ovs_interfaceid": "0102f455-ad74-4bf4-a0b8-8a2ec1d59514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.917740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-386e0e80-c4f9-4166-a908-48bcc90abd24 tempest-ServersAdminNegativeTestJSON-728532241 tempest-ServersAdminNegativeTestJSON-728532241-project-member] Lock "be761cf1-0949-42c0-8a38-58af33113a03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.584s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.010923] env[68638]: DEBUG nova.network.neutron [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Updated VIF entry in instance network info cache for port 6f9df10a-d887-489b-b7e7-a3305f8c5c9e. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 840.011297] env[68638]: DEBUG nova.network.neutron [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Updating instance_info_cache with network_info: [{"id": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "address": "fa:16:3e:56:fb:a6", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9df10a-d8", "ovs_interfaceid": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.193789] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5242f777-f17d-2f43-f5d6-a91d7028c7c0, 'name': SearchDatastore_Task, 'duration_secs': 0.023322} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.202372] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.203713] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.204129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.204326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.205037] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.205712] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66f3729e-10f3-4a5e-bcb0-ee3238134846 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.208572] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Successfully updated port: c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.225962] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833653, 'name': CreateVM_Task, 'duration_secs': 2.188713} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.231668] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.232397] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833652, 'name': CloneVM_Task, 'duration_secs': 3.33684} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.232649] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.232805] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.234251] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.234409] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.234707] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 840.234965] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Created linked-clone VM from snapshot [ 840.235225] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801fcd4f-8570-4759-959d-e998189da568 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.238031] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47d73ea4-3b17-44d0-b57b-60e301353a51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.240530] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085d42bb-4ad8-4cff-859b-348d00f8e7a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.250157] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 840.250157] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d8cb8b-0c28-b5af-9e21-c4666dd28984" [ 840.250157] env[68638]: _type = "Task" [ 840.250157] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.257866] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 840.257866] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ee18eb-d051-5379-dfc1-397d4d7d478c" [ 840.257866] env[68638]: _type = "Task" [ 840.257866] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.258297] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Uploading image 0ab383ba-81c2-40eb-a407-c1e2cb2c53f9 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 840.273451] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d8cb8b-0c28-b5af-9e21-c4666dd28984, 'name': SearchDatastore_Task, 'duration_secs': 0.020541} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.280715] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ee18eb-d051-5379-dfc1-397d4d7d478c, 'name': SearchDatastore_Task, 'duration_secs': 0.019763} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.282924] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ad5d5e3-3176-4aa0-9f0a-3454166870a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.285425] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.285683] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.285958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.286217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.286364] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.288297] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-539f28e5-bb84-4468-83b4-3951e8cd6cc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.293217] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 840.293217] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526537bb-6964-078a-e79b-96a47ab86b09" [ 840.293217] env[68638]: _type = "Task" [ 840.293217] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.295056] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 840.295056] env[68638]: value = "vm-569908" [ 840.295056] env[68638]: _type = "VirtualMachine" [ 840.295056] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 840.295371] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-97ec93f5-969d-4a4e-8ee5-f2902107edfe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.306057] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.306179] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.307470] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b032f913-5edc-4d6a-a609-35c02ca99dda {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.319096] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526537bb-6964-078a-e79b-96a47ab86b09, 'name': SearchDatastore_Task, 'duration_secs': 0.018811} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.319953] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 840.319953] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529554e4-3b20-7eca-60c2-cce7f63169e1" [ 840.319953] env[68638]: _type = "Task" [ 840.319953] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.323738] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.327143] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ee752ace-fa19-4fd7-af89-f6628ce3d087.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.327143] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease: (returnval){ [ 840.327143] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d68490-66ba-3c43-c98d-c874177348bf" [ 840.327143] env[68638]: _type = "HttpNfcLease" [ 840.327143] env[68638]: } obtained for exporting VM: (result){ [ 840.327143] env[68638]: value = "vm-569908" [ 840.327143] env[68638]: _type = "VirtualMachine" [ 840.327143] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 840.327143] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the lease: (returnval){ [ 840.327143] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d68490-66ba-3c43-c98d-c874177348bf" [ 840.327143] env[68638]: _type = "HttpNfcLease" [ 840.327143] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 840.327143] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9955b0b6-4575-4c18-bc1f-ded8ce975e45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.342540] env[68638]: DEBUG oslo_concurrency.lockutils [req-b063bcee-25ef-46e0-a678-9adeef327061 req-1c63b4bb-cb35-4af7-af8b-9d88e3304be8 service nova] Releasing lock "refresh_cache-27ff37a6-de93-4a4b-904f-a91fdb8b0aff" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.349188] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 840.349188] env[68638]: value = "task-2833658" [ 840.349188] env[68638]: _type = "Task" [ 840.349188] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.349501] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529554e4-3b20-7eca-60c2-cce7f63169e1, 'name': SearchDatastore_Task, 'duration_secs': 0.015048} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.349694] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 840.349694] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d68490-66ba-3c43-c98d-c874177348bf" [ 840.349694] env[68638]: _type = "HttpNfcLease" [ 840.349694] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 840.350594] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 840.350594] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d68490-66ba-3c43-c98d-c874177348bf" [ 840.350594] env[68638]: _type = "HttpNfcLease" [ 840.350594] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 840.353952] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f4f7f95-9e75-4a1d-bebf-d9afed0736b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.357209] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e2c834-ea25-46d5-8f7d-e0172cf7b50b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.374091] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 840.374317] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 840.375658] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.375987] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 840.375987] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b8466a-fe20-c363-fd60-ae66e460561e" [ 840.375987] env[68638]: _type = "Task" [ 840.375987] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.451431] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b8466a-fe20-c363-fd60-ae66e460561e, 'name': SearchDatastore_Task, 'duration_secs': 0.01212} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.455493] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.455493] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a98f0c63-d327-47b9-b0c2-f7790f1ae87d/a98f0c63-d327-47b9-b0c2-f7790f1ae87d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.455493] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31df6137-1a4c-45f5-b3c3-66c7e76c03b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.466083] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 840.466083] env[68638]: value = "task-2833659" [ 840.466083] env[68638]: _type = "Task" [ 840.466083] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.475646] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833659, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.498383] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-105cc4fc-373a-4c80-92b9-7d9600f4f07e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.515957] env[68638]: DEBUG oslo_concurrency.lockutils [req-c8386ea3-f26f-4efb-bf84-c619f27d9660 req-2fba9efd-e2ae-4539-85ec-b528c2ae8868 service nova] Releasing lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.624214] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e341261a-6061-4074-8080-532d2117daea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.642109] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc02ec4-630c-486e-8213-956b22fee637 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.706911] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b24053-76e6-41ee-9e60-7eb255800cbe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.721729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.721975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquired lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.722213] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.726230] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fab292-fe00-427e-96ad-a5bb0e0d6a1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.755225] env[68638]: DEBUG nova.compute.provider_tree [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.845921] env[68638]: DEBUG nova.compute.manager [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Received event 
network-vif-plugged-c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.846353] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Acquiring lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.846680] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.846926] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.847230] env[68638]: DEBUG nova.compute.manager [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] No waiting events found dispatching network-vif-plugged-c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.847482] env[68638]: WARNING nova.compute.manager [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Received unexpected event network-vif-plugged-c999665e-f15e-46cf-9d3c-b7252ab6a96a for instance with vm_state building and task_state spawning. [ 840.847743] env[68638]: DEBUG nova.compute.manager [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Received event network-changed-c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.847988] env[68638]: DEBUG nova.compute.manager [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Refreshing instance network info cache due to event network-changed-c999665e-f15e-46cf-9d3c-b7252ab6a96a. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 840.848235] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Acquiring lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.869735] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833658, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.979648] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833659, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.045701] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.045701] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.045946] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.046339] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.046339] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.046459] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.047160] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.047160] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 
tempest-ServerActionsV293TestJSON-2012944530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 841.047160] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.047160] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.047325] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 841.047479] env[68638]: DEBUG nova.virt.hardware [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.048475] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0f0bd5-055e-49e5-90e6-89731323665e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.060639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37465266-a2de-4a85-b577-cb6504ac3cb5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.265549] env[68638]: DEBUG nova.scheduler.client.report [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 841.308790] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.368101] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59044} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.368422] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ee752ace-fa19-4fd7-af89-f6628ce3d087.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.368756] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.369187] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2ea17a2-6f1a-44ba-951b-3bf0eea23abf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.381208] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 841.381208] env[68638]: value = "task-2833660" [ 841.381208] env[68638]: _type = "Task" [ 841.381208] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.392848] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.477830] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833659, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704415} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.478231] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] a98f0c63-d327-47b9-b0c2-f7790f1ae87d/a98f0c63-d327-47b9-b0c2-f7790f1ae87d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.478456] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.478720] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3b2291ad-1973-4b35-9f0e-484c687749e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.489733] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 841.489733] env[68638]: value = "task-2833661" [ 841.489733] env[68638]: _type = "Task" [ 841.489733] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.499235] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833661, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.556043] env[68638]: DEBUG nova.network.neutron [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updating instance_info_cache with network_info: [{"id": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "address": "fa:16:3e:25:4b:95", "network": {"id": "6433ac10-3478-4cb7-b866-bcf1d91477b6", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1096612309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ee3bf60f3a4d8f99f2fa20b6f13792", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc999665e-f1", "ovs_interfaceid": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.772324] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.387s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.782775] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.830s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.785723] env[68638]: DEBUG nova.objects.instance [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lazy-loading 'resources' on Instance uuid 20f2c343-1f32-4c36-b4a9-8f009b6ac326 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 841.813740] env[68638]: INFO nova.scheduler.client.report [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted allocations for instance 14772ba8-bde2-42ef-9a37-df876c8af321 [ 841.902235] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092329} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.902235] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.905327] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1161450b-f704-4b10-b764-28cb09d499c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.934076] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ee752ace-fa19-4fd7-af89-f6628ce3d087.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.934695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cdeb88f-8f61-431c-a9ab-df4b87edf560 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.964171] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 841.964171] env[68638]: value = "task-2833662" [ 841.964171] env[68638]: _type = "Task" [ 841.964171] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.973045] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.000871] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833661, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078555} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.001219] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.002365] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7b12bf-1432-491f-b10c-fae1151d096a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.031143] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] a98f0c63-d327-47b9-b0c2-f7790f1ae87d/a98f0c63-d327-47b9-b0c2-f7790f1ae87d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.031822] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b97e6275-e6e4-4d9c-a357-98b7eec692a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.055253] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 842.055253] env[68638]: value = "task-2833663" [ 842.055253] env[68638]: _type = "Task" [ 842.055253] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.061400] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Releasing lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.061782] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance network_info: |[{"id": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "address": "fa:16:3e:25:4b:95", "network": {"id": "6433ac10-3478-4cb7-b866-bcf1d91477b6", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1096612309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ee3bf60f3a4d8f99f2fa20b6f13792", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc999665e-f1", "ovs_interfaceid": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.062118] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Acquired lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.062308] env[68638]: DEBUG nova.network.neutron [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Refreshing network info cache for port c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.064489] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:4b:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c999665e-f15e-46cf-9d3c-b7252ab6a96a', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.078740] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 
tempest-ServerActionsV293TestJSON-2012944530-project-member] Creating folder: Project (d4ee3bf60f3a4d8f99f2fa20b6f13792). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.081806] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95415f51-b9ab-45e3-b49e-4c6f89e912fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.088934] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833663, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.103145] env[68638]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 842.103320] env[68638]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68638) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 842.103729] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Folder already exists: Project (d4ee3bf60f3a4d8f99f2fa20b6f13792). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 842.103945] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Creating folder: Instances. Parent ref: group-v569865. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.104634] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94f75632-512a-4ebf-83ef-0e2ccf542182 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.117781] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Created folder: Instances in parent group-v569865. [ 842.118271] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.119062] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.119062] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d1a9741-dd56-4f3d-96c4-034a5b110e4b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.144230] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.144230] env[68638]: value = "task-2833666" [ 842.144230] env[68638]: _type = "Task" [ 842.144230] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.153920] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.330022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4712db6a-eedf-42e0-9df8-cff990da046a tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "14772ba8-bde2-42ef-9a37-df876c8af321" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.382s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.475426] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833662, 'name': ReconfigVM_Task, 'duration_secs': 0.369293} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.477187] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Reconfigured VM instance instance-0000003d to attach disk [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ee752ace-fa19-4fd7-af89-f6628ce3d087.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.477187] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88090699-9bbd-49dd-8601-dde4f4504d75 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.489480] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 842.489480] env[68638]: value = "task-2833667" [ 842.489480] env[68638]: _type = "Task" [ 842.489480] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.500093] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833667, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.573303] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833663, 'name': ReconfigVM_Task, 'duration_secs': 0.359206} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.573759] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Reconfigured VM instance instance-0000003c to attach disk [datastore1] a98f0c63-d327-47b9-b0c2-f7790f1ae87d/a98f0c63-d327-47b9-b0c2-f7790f1ae87d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.575116] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d346b8f3-bc88-4281-878f-fc759a96fed9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.586392] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 842.586392] env[68638]: value = "task-2833668" [ 842.586392] env[68638]: _type = "Task" [ 842.586392] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.600339] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833668, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.656895] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.891041] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3a7db7-2173-4ba0-86f9-d4f9b1f895a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.900702] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a795e908-d7da-4b09-9919-6518eafedd7f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.942994] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f86375-179c-4194-8201-7f67f336f587 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.952819] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60e2eba-8f54-4e50-9546-3f895efc7747 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.958194] env[68638]: DEBUG nova.network.neutron [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updated VIF entry in instance network info cache for port c999665e-f15e-46cf-9d3c-b7252ab6a96a. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.958755] env[68638]: DEBUG nova.network.neutron [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updating instance_info_cache with network_info: [{"id": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "address": "fa:16:3e:25:4b:95", "network": {"id": "6433ac10-3478-4cb7-b866-bcf1d91477b6", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1096612309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ee3bf60f3a4d8f99f2fa20b6f13792", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc999665e-f1", "ovs_interfaceid": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.974957] env[68638]: DEBUG nova.compute.provider_tree [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.005803] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833667, 'name': Rename_Task, 'duration_secs': 0.439533} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.006676] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.006934] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99693fb5-7d43-4821-8da0-34830e9fbbe0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.015316] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 843.015316] env[68638]: value = "task-2833669" [ 843.015316] env[68638]: _type = "Task" [ 843.015316] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.025157] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.098988] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833668, 'name': Rename_Task, 'duration_secs': 0.164591} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.099365] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.099753] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25f70540-922a-4e13-b0a3-0bf9f6a7249c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.107970] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 843.107970] env[68638]: value = "task-2833670" [ 843.107970] env[68638]: _type = "Task" [ 843.107970] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.118657] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.157841] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.461235] env[68638]: DEBUG oslo_concurrency.lockutils [req-3f8721cd-4c3b-4353-b29a-99d02399054c req-7eb090ec-514d-40b6-a840-f5e891e3b247 service nova] Releasing lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.506243] env[68638]: ERROR nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] [req-8f80f291-a6c2-43ff-9490-9602f172a533] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8f80f291-a6c2-43ff-9490-9602f172a533"}]} [ 843.531376] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833669, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.535678] env[68638]: DEBUG nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 843.559660] env[68638]: DEBUG nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 843.560675] env[68638]: DEBUG nova.compute.provider_tree [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.580571] env[68638]: DEBUG nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 843.606867] env[68638]: DEBUG nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 843.621502] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833670, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.663443] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.027418] env[68638]: DEBUG oslo_vmware.api [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833669, 'name': PowerOnVM_Task, 'duration_secs': 0.665702} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.027731] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.027966] env[68638]: INFO nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Took 7.55 seconds to spawn the instance on the hypervisor. [ 844.028250] env[68638]: DEBUG nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.029199] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648b2d0e-1e71-4b8c-ba6f-7e01f5302614 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.125760] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833670, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.159993] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.162296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50f5509-9191-4d43-aa02-1b6a9add3c15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.172311] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789d9134-d06c-46d0-8c75-bc10a37ced81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.213105] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3444817c-3927-4d61-9107-637c8d4fed80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.223285] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fd4085-3001-4032-98a0-b34c2f630f0d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.240187] env[68638]: DEBUG nova.compute.provider_tree [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.555184] env[68638]: INFO nova.compute.manager [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Took 58.11 seconds to build instance. [ 844.621696] env[68638]: DEBUG oslo_vmware.api [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833670, 'name': PowerOnVM_Task, 'duration_secs': 1.077482} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.621971] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.622479] env[68638]: INFO nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Took 12.93 seconds to spawn the instance on the hypervisor. 
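The recurring "Task: {...} progress is N%" entries above come from oslo.vmware's task polling: once a vCenter task such as CreateVM_Task, ExtendVirtualDisk_Task or PowerOnVM_Task has been started, wait_for_task()/_poll_task in oslo_vmware/api.py (the code path cited in these records) re-reads the task info on a fixed interval and logs the reported progress until the task reaches success or error. The snippet below is a minimal, self-contained sketch of that loop for illustration only, not the library's actual implementation; the wait_for_task/poll_task_info names, the task-state dictionaries and the toy task id are placeholders, not values from this deployment.

    import time

    def wait_for_task(poll_task_info, interval=0.5):
        # Poll a task until it reaches a terminal state, logging progress
        # the same way the "_poll_task ... progress is N%" records above do.
        while True:
            info = poll_task_info()
            if info['state'] == 'running':
                print("Task %s progress is %d%%." % (info['id'], info['progress']))
                time.sleep(interval)
            elif info['state'] == 'success':
                return info
            else:  # 'error'
                raise RuntimeError("Task %s failed: %s" % (info['id'], info.get('error')))

    # Toy stand-in for a vCenter task that reports 0% -> 25% -> done,
    # mirroring the CreateVM_Task / PowerOnVM_Task progression in the log.
    _states = iter([
        {'id': 'task-0000001', 'state': 'running', 'progress': 0},
        {'id': 'task-0000001', 'state': 'running', 'progress': 25},
        {'id': 'task-0000001', 'state': 'success', 'progress': 100},
    ])
    print(wait_for_task(lambda: next(_states), interval=0.01))

In the real driver the poll interval and retry behaviour are configured on the VMwareAPISession, and the loop is driven by an oslo.service looping call rather than a bare while/sleep; the sketch only shows the shape of the polling that produces the progress lines.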
[ 844.622691] env[68638]: DEBUG nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.623542] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2792a373-c28d-4613-ad13-e48271522f85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.658042] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.159072] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.193901] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.203287] env[68638]: DEBUG nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 847.203594] env[68638]: DEBUG nova.compute.provider_tree [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 87 to 88 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 847.203858] env[68638]: DEBUG nova.compute.provider_tree [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.617167] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 847.618562] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2f1e52-fa78-479f-92c4-bd3c67634626 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.627422] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 847.627641] env[68638]: ERROR oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk due to incomplete transfer. [ 847.628011] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-70b6493e-f24e-453c-8a48-318186e5a868 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.629978] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea35c071-eb90-460c-8d60-9d6460162815 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.473s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.645826] env[68638]: INFO nova.compute.manager [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Took 64.77 seconds to build instance. [ 847.646357] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529b1a93-97c9-fd64-42a4-0de2663e7c2f/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 847.646640] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Uploaded image 380d98bc-ca07-48a4-9708-7df38f3a8d75 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 847.650707] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 847.651731] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9894ecfa-d7f2-4abd-9d87-6938a99a5e2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.660610] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833666, 'name': CreateVM_Task, 'duration_secs': 5.278938} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.663923] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.663923] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 847.663923] env[68638]: value = "task-2833671" [ 847.663923] env[68638]: _type = "Task" [ 847.663923] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.663923] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': 'a5452859-314c-4413-98ae-941e73f8b33e', 'device_type': None, 'disk_bus': None, 'delete_on_termination': True, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569870', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'name': 'volume-4dc574c0-0283-4f21-ac01-f714b10306da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02894a47-59b1-475b-b934-c8d0b6dabc5b', 'attached_at': '', 'detached_at': '', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'serial': '4dc574c0-0283-4f21-ac01-f714b10306da'}, 'volume_type': None}], 'swap': None} {{(pid=68638) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 847.663923] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Root volume attach. 
Driver type: vmdk {{(pid=68638) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 847.664797] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6487fb-c663-4688-a60b-786bba9f322a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.683450] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecdf542-3f5e-4e24-96f7-87c6bd07ba1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.683909] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833671, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.689244] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf3aa97-81f1-4bc5-a739-181ae83fd1da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.698120] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-aff4c50c-b634-4357-ad44-59cbbaa38fed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.709836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.932s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.712539] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 847.712539] env[68638]: value = "task-2833672" [ 847.712539] env[68638]: _type = "Task" [ 847.712539] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.713120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.137s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.714863] env[68638]: INFO nova.compute.claims [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.727643] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833672, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.742870] env[68638]: INFO nova.scheduler.client.report [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Deleted allocations for instance 20f2c343-1f32-4c36-b4a9-8f009b6ac326 [ 848.138237] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.152203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bd01ec41-c1d1-4d07-9496-7ff1ffd42ab3 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.086s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.180278] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833671, 'name': Destroy_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.234550] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833672, 'name': RelocateVM_Task} progress is 20%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.250935] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e0c2e0a6-dff3-46c1-9797-611d15634e69 tempest-ServerShowV247Test-2042402141 tempest-ServerShowV247Test-2042402141-project-member] Lock "20f2c343-1f32-4c36-b4a9-8f009b6ac326" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.543s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.661166] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.664151] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.674967] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833671, 'name': Destroy_Task, 'duration_secs': 0.562614} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.675286] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Destroyed the VM [ 848.675561] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 848.679016] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-79f21a4d-3508-49fd-b099-8d71225813b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.686434] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 848.686434] env[68638]: value = "task-2833673" [ 848.686434] env[68638]: _type = "Task" [ 848.686434] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.698375] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833673, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.732267] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833672, 'name': RelocateVM_Task, 'duration_secs': 0.762057} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.734535] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 848.734535] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569870', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'name': 'volume-4dc574c0-0283-4f21-ac01-f714b10306da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02894a47-59b1-475b-b934-c8d0b6dabc5b', 'attached_at': '', 'detached_at': '', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'serial': '4dc574c0-0283-4f21-ac01-f714b10306da'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 848.734963] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1b55cf-e6ec-46d2-853f-fa5e32b6569a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.754779] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de08df0-bb33-440a-a4dd-b3d79a6f7bcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.783089] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] volume-4dc574c0-0283-4f21-ac01-f714b10306da/volume-4dc574c0-0283-4f21-ac01-f714b10306da.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.786598] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7de067aa-bc26-44aa-961a-425420091006 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.810341] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 848.810341] env[68638]: value = "task-2833674" [ 848.810341] env[68638]: _type = "Task" [ 848.810341] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.830551] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833674, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.197657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.201101] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833673, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.321122] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833674, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.337952] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d881aa8-33cc-451b-893a-0371617bb15f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.348575] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3268f13a-6cfb-4215-8359-01f23fca2b3b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.380519] env[68638]: INFO nova.compute.manager [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Rescuing [ 849.381610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.381610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.381610] env[68638]: DEBUG nova.network.neutron [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.383323] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c3f8aa-6b82-461b-907b-d41ed1eb35a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.396732] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-488a37b6-4cfb-4255-b4cc-7943c9062bb0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.415726] env[68638]: DEBUG nova.compute.provider_tree [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.456371] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "fd6d5951-f2a1-422d-b137-4d19759f9060" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.456773] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.657576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.657576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.657576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.657576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.657576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] 
Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.658120] env[68638]: INFO nova.compute.manager [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Terminating instance [ 849.700103] env[68638]: DEBUG oslo_vmware.api [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833673, 'name': RemoveSnapshot_Task, 'duration_secs': 0.749484} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.700368] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 849.700750] env[68638]: INFO nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Took 19.66 seconds to snapshot the instance on the hypervisor. [ 849.824567] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833674, 'name': ReconfigVM_Task, 'duration_secs': 0.710773} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.824931] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Reconfigured VM instance instance-0000003e to attach disk [datastore2] volume-4dc574c0-0283-4f21-ac01-f714b10306da/volume-4dc574c0-0283-4f21-ac01-f714b10306da.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.830986] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c888631e-6a4c-4af6-938c-d07406ac4e50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.851558] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 849.851558] env[68638]: value = "task-2833675" [ 849.851558] env[68638]: _type = "Task" [ 849.851558] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.863090] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833675, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.919540] env[68638]: DEBUG nova.scheduler.client.report [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.944041] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.944305] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.944498] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.944688] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.944905] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.946842] env[68638]: INFO nova.compute.manager [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Terminating instance [ 850.139335] env[68638]: DEBUG nova.network.neutron [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 
ee752ace-fa19-4fd7-af89-f6628ce3d087] Updating instance_info_cache with network_info: [{"id": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "address": "fa:16:3e:56:fb:a6", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f9df10a-d8", "ovs_interfaceid": "6f9df10a-d887-489b-b7e7-a3305f8c5c9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.166172] env[68638]: DEBUG nova.compute.manager [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.166419] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.169938] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18062de3-7fbd-4613-a826-f78b01e9203d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.179456] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.179731] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6363825b-d954-4c34-83f7-79667fc03de9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.188991] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 850.188991] env[68638]: value = "task-2833676" [ 850.188991] env[68638]: _type = "Task" [ 850.188991] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.200161] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.208188] env[68638]: DEBUG nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance disappeared during snapshot {{(pid=68638) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 850.220180] env[68638]: DEBUG nova.compute.manager [None req-ee831493-292e-408e-ab5d-b16fd830425c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image not found during clean up 380d98bc-ca07-48a4-9708-7df38f3a8d75 {{(pid=68638) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 850.363858] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833675, 'name': ReconfigVM_Task, 'duration_secs': 0.238973} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.364429] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569870', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'name': 'volume-4dc574c0-0283-4f21-ac01-f714b10306da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02894a47-59b1-475b-b934-c8d0b6dabc5b', 'attached_at': '', 'detached_at': '', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'serial': '4dc574c0-0283-4f21-ac01-f714b10306da'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 850.365579] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b50d8a44-105e-4192-a9f6-5eb7ffc93042 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.375576] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 850.375576] env[68638]: value = "task-2833677" [ 850.375576] env[68638]: _type = "Task" [ 850.375576] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.390417] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833677, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.430136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.430136] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 850.436342] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.804s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.436729] env[68638]: DEBUG nova.objects.instance [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid 772af0c0-a8dd-4167-87bc-617a9d95b54d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.452128] env[68638]: DEBUG nova.compute.manager [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.452437] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.457264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6f3384-baa1-4a55-8846-d72b6c4ed118 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.470230] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.470566] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e60332d1-d0b3-4d2d-865c-9e1221091699 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.644975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "refresh_cache-ee752ace-fa19-4fd7-af89-f6628ce3d087" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.678164] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 850.681032] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10926ef7-3f78-47e9-a15b-06799fcfd712 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.687340] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 850.687509] env[68638]: ERROR oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk due to incomplete transfer. 
[ 850.687744] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-613982be-8e97-45cb-9c17-c14719ff008d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.700037] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833676, 'name': PowerOffVM_Task, 'duration_secs': 0.298548} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.700217] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.700492] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.700636] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00b9ccd2-7489-4686-b484-68da72d416fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.710513] env[68638]: DEBUG oslo_vmware.rw_handles [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a54621-5d0e-8de8-c8ad-49ac1313ab31/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 850.710739] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Uploaded image 0ab383ba-81c2-40eb-a407-c1e2cb2c53f9 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 850.712479] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 850.712820] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ee2a4b92-6b7f-490c-abe6-bee4bab0b064 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.722195] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 850.722195] env[68638]: value = "task-2833680" [ 850.722195] env[68638]: _type = "Task" [ 850.722195] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.731864] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833680, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.764322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "9975e756-b571-4e70-ba50-a6001d0b064c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.764562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.862034] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.862188] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.862405] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore1] 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.862739] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50d92bf5-834d-42e5-ab43-896d6852c853 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.871034] env[68638]: DEBUG oslo_vmware.api [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 850.871034] env[68638]: value = "task-2833681" [ 850.871034] env[68638]: _type = "Task" [ 850.871034] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.882845] env[68638]: DEBUG oslo_vmware.api [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.888961] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833677, 'name': Rename_Task, 'duration_secs': 0.274805} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.889560] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.889560] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f78fe82-1e6c-4441-b176-2d1b7a3be895 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.892995] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.893222] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.893474] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleting the datastore file [datastore1] a98f0c63-d327-47b9-b0c2-f7790f1ae87d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.893842] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8120fd29-3582-4e1e-909b-298aa81cf4ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.898826] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 850.898826] env[68638]: value = "task-2833682" [ 850.898826] env[68638]: _type = "Task" [ 850.898826] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.904185] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for the task: (returnval){ [ 850.904185] env[68638]: value = "task-2833683" [ 850.904185] env[68638]: _type = "Task" [ 850.904185] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.910929] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833682, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.916918] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.940572] env[68638]: DEBUG nova.compute.utils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 850.945443] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 850.945597] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 851.016113] env[68638]: DEBUG nova.policy [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5fce0bf2fb44b84afd238d875790fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc24eaf6cf74d539558c0a736e18c3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 852.058048] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833680, 'name': Destroy_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.058048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374009b8-51eb-47db-a06e-fcdb925af9e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.058048] env[68638]: DEBUG oslo_vmware.api [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197782} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.058048] env[68638]: INFO nova.compute.manager [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Took 0.93 seconds to destroy the instance on the hypervisor. [ 852.058048] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.058048] env[68638]: DEBUG nova.compute.manager [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 852.058048] env[68638]: DEBUG nova.network.neutron [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 852.058048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aaca5c-d635-4d71-b1da-12c484dad33c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.058048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad57ef7e-d993-4ce8-8707-f53b6ebe6121 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.058048] env[68638]: DEBUG oslo_vmware.api [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Task: {'id': task-2833683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198982} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.058048] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.058048] env[68638]: INFO nova.compute.manager [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Took 1.27 seconds to destroy the instance on the hypervisor. [ 852.061291] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.061291] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833682, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.061291] env[68638]: DEBUG nova.compute.manager [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 852.061291] env[68638]: DEBUG nova.network.neutron [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 852.061291] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b343de9-60fd-4ccf-afe5-18b31059cdac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.061291] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.061291] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833680, 'name': Destroy_Task, 'duration_secs': 0.698407} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.061291] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Destroyed the VM [ 852.061291] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 852.061291] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2c23904d-2f43-4da9-a517-4baeb312e537 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.061291] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 852.061291] env[68638]: value = "task-2833684" [ 852.061291] env[68638]: _type = "Task" [ 852.061291] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.061291] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833684, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.061291] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833682, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.061291] env[68638]: ERROR nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [req-6291be73-78ce-4d8e-995b-65ef06cc6876] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6291be73-78ce-4d8e-995b-65ef06cc6876"}]} [ 852.063140] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 852.063140] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 852.063140] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.063140] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 852.063140] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 852.151431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "1bc685aa-4e88-402f-b581-d179706b12a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.151908] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.183154] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.183154] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1154425e-b31e-40ca-a5c3-77861e4cb82a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.191045] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 852.191045] env[68638]: value = "task-2833685" [ 852.191045] env[68638]: _type = "Task" [ 852.191045] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.210090] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833685, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.254824] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833684, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.435709] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833682, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.442825] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Successfully created port: fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 852.447580] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 852.601422] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d9f74a-fb81-4e94-97c4-a6ddf0810f10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.611019] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d00bce-a588-4ad7-b317-78771da1c5ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.654953] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e06993-1402-4512-83b8-b0285132beff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.662921] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2ceba1-040f-445e-87a0-aa2d7ed9eebc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.677409] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.678769] env[68638]: DEBUG nova.network.neutron [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Updating 
instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.706100] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833685, 'name': PowerOffVM_Task, 'duration_secs': 0.258126} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.706407] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.707265] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907693d4-16c8-465c-87cc-d0cccc1cf7a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.726938] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59e8387-2ff4-44cb-9d9d-cfc5cf1217c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.751979] env[68638]: DEBUG oslo_vmware.api [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833684, 'name': RemoveSnapshot_Task, 'duration_secs': 0.64396} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.752286] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 852.752505] env[68638]: INFO nova.compute.manager [None req-a31f69ec-879a-4429-80a6-6ca238446f4b tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Took 18.19 seconds to snapshot the instance on the hypervisor. [ 852.775019] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.775019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5996ba54-5f5c-4b53-b398-763c1353bf28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.782336] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 852.782336] env[68638]: value = "task-2833686" [ 852.782336] env[68638]: _type = "Task" [ 852.782336] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.794616] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 852.794616] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.794815] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.794993] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.795241] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.795707] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-871375cb-771d-404b-808a-157376d12ce3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.807729] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.807884] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 852.808916] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00d8955b-cdc6-430c-a328-b5a7d4a561df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.815597] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 852.815597] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520a9c94-5628-6a22-a572-99dc963dc649" [ 852.815597] env[68638]: _type = "Task" [ 852.815597] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.825600] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520a9c94-5628-6a22-a572-99dc963dc649, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.870421] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.870421] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.930586] env[68638]: DEBUG oslo_vmware.api [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833682, 'name': PowerOnVM_Task, 'duration_secs': 1.646003} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.930876] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.931100] env[68638]: INFO nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Took 11.89 seconds to spawn the instance on the hypervisor. 
[ 852.931306] env[68638]: DEBUG nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 852.932109] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4b8f8c-0b45-403f-a9a4-34905e940d5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.936734] env[68638]: DEBUG nova.compute.manager [req-a602934c-5cc1-4c1f-875c-61e3492720ed req-61caf0fb-22f1-40ea-87d4-400409b4e86d service nova] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Received event network-vif-deleted-7ccad3d9-aa85-4881-a1ec-c4e32106fb16 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 853.022916] env[68638]: DEBUG nova.network.neutron [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.186349] env[68638]: INFO nova.compute.manager [-] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Took 1.80 seconds to deallocate network for instance. [ 853.202832] env[68638]: ERROR nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [req-f6a4bb67-6915-424b-b31f-ee614f7add4b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f6a4bb67-6915-424b-b31f-ee614f7add4b"}]} [ 853.222279] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 853.238009] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 853.238270] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.251343] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 853.270989] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 853.326495] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520a9c94-5628-6a22-a572-99dc963dc649, 'name': SearchDatastore_Task, 'duration_secs': 0.018083} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.329841] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d2228a7-588f-4915-a177-9b5ebdf33e26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.336594] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 853.336594] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a0a21a-0ed2-ae64-0e4b-72e56b977327" [ 853.336594] env[68638]: _type = "Task" [ 853.336594] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.349810] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a0a21a-0ed2-ae64-0e4b-72e56b977327, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.456071] env[68638]: INFO nova.compute.manager [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Took 64.12 seconds to build instance. [ 853.457338] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 853.486300] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 853.486300] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 853.486499] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 853.486616] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 853.486772] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 853.486915] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 853.487135] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 853.487295] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 853.487468] env[68638]: DEBUG 
nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 853.487611] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 853.487788] env[68638]: DEBUG nova.virt.hardware [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 853.488900] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7af824e-6f05-4e2b-8ce2-9459fb381d67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.504316] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bb249c-8451-4883-bb54-ff98f3556635 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.526910] env[68638]: INFO nova.compute.manager [-] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Took 2.09 seconds to deallocate network for instance. [ 853.694506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.733626] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc56b12-136b-4ff1-b7c8-bb767d451b87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.742022] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cddf61d-938d-43fc-a30e-b39a39aecc79 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.772365] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906d833e-2526-43b2-9a5d-5ef32640df10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.780196] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7791b617-32c1-40f3-b869-d42ba5581ee2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.793422] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 853.847850] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a0a21a-0ed2-ae64-0e4b-72e56b977327, 'name': SearchDatastore_Task, 'duration_secs': 0.012772} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.847850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.847850] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. {{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 853.847850] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22399a38-8652-464d-abfc-6d717a6076db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.855628] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 853.855628] env[68638]: value = "task-2833687" [ 853.855628] env[68638]: _type = "Task" [ 853.855628] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.864338] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833687, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.962674] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7b568bbd-6724-4f37-bed9-8df729b7ed40 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.571s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.033140] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.209869] env[68638]: DEBUG nova.compute.manager [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Received event network-vif-plugged-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 854.210228] env[68638]: DEBUG oslo_concurrency.lockutils [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] Acquiring lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.210340] env[68638]: DEBUG oslo_concurrency.lockutils [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.210759] env[68638]: DEBUG oslo_concurrency.lockutils [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.210759] env[68638]: DEBUG nova.compute.manager [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] No waiting events found dispatching network-vif-plugged-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 854.210901] env[68638]: WARNING nova.compute.manager [req-564a488e-1e54-4947-a3db-256b0ac79c43 req-685b4546-f90c-4642-850a-0a41bb64650f service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Received unexpected event network-vif-plugged-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 for instance with vm_state building and task_state spawning. 
[ 854.320715] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Successfully updated port: fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.340764] env[68638]: DEBUG nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 854.341113] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 92 to 93 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 854.341329] env[68638]: DEBUG nova.compute.provider_tree [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 854.372183] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833687, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.466456] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 854.502791] env[68638]: DEBUG nova.compute.manager [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 854.503793] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9c785a-271f-4b7d-8440-943a93566c41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.825307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.825463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.825623] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.846836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.415s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.849126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 52.321s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.850725] env[68638]: INFO nova.compute.claims [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 854.870114] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833687, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.871206] env[68638]: INFO nova.scheduler.client.report [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance 772af0c0-a8dd-4167-87bc-617a9d95b54d [ 854.872089] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. [ 854.875076] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aaca75-5378-4846-8c70-babce8832435 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.901554] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.902454] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d2e2508-b784-4897-a0c4-e91760461063 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.924070] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 854.924070] env[68638]: value = "task-2833688" [ 854.924070] env[68638]: _type = "Task" [ 854.924070] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.931804] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833688, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.970286] env[68638]: DEBUG nova.compute.manager [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-vif-deleted-ceee0c90-edea-4bd9-ba2e-c7de2fe8b47b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 854.970500] env[68638]: DEBUG nova.compute.manager [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Received event network-vif-deleted-00c4f801-ad2b-4bfa-b69e-338c7f8e36bc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 854.970674] env[68638]: DEBUG nova.compute.manager [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Received event network-changed-c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 854.970829] env[68638]: DEBUG nova.compute.manager [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Refreshing instance network info cache due to event network-changed-c999665e-f15e-46cf-9d3c-b7252ab6a96a. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 854.971055] env[68638]: DEBUG oslo_concurrency.lockutils [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] Acquiring lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.971199] env[68638]: DEBUG oslo_concurrency.lockutils [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] Acquired lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.971355] env[68638]: DEBUG nova.network.neutron [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Refreshing network info cache for port c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.987824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.018471] env[68638]: INFO nova.compute.manager [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] instance snapshotting [ 855.021179] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3a66bc-fbbd-49c8-81cf-9683d377171e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 855.043855] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d750536-0f43-496c-bdb4-7eaff1c18965 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.357674] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.381034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c24c5420-34cf-4dd5-8fee-d3a67c248a4e tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "772af0c0-a8dd-4167-87bc-617a9d95b54d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 59.233s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.434559] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833688, 'name': ReconfigVM_Task, 'duration_secs': 0.354508} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.434979] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Reconfigured VM instance instance-0000003d to attach disk [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.435860] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76378256-1419-4fb7-9be3-cd1bda506393 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.468854] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b96d873-cfaf-4fb2-8f5c-15c09e157e0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.494020] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 855.494020] env[68638]: value = "task-2833689" [ 855.494020] env[68638]: _type = "Task" [ 855.494020] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.501984] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833689, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.540045] env[68638]: DEBUG nova.network.neutron [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updating instance_info_cache with network_info: [{"id": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "address": "fa:16:3e:f9:6a:25", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb14533d-1d", "ovs_interfaceid": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.555525] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 855.556130] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e4731ef6-f9a5-43bd-9eea-e35d9cef77ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.564618] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 855.564618] env[68638]: value = "task-2833690" [ 855.564618] env[68638]: _type = "Task" [ 855.564618] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.579071] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833690, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.787649] env[68638]: DEBUG nova.network.neutron [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updated VIF entry in instance network info cache for port c999665e-f15e-46cf-9d3c-b7252ab6a96a. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 855.788084] env[68638]: DEBUG nova.network.neutron [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updating instance_info_cache with network_info: [{"id": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "address": "fa:16:3e:25:4b:95", "network": {"id": "6433ac10-3478-4cb7-b866-bcf1d91477b6", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1096612309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d4ee3bf60f3a4d8f99f2fa20b6f13792", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc999665e-f1", "ovs_interfaceid": "c999665e-f15e-46cf-9d3c-b7252ab6a96a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.002992] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833689, 'name': ReconfigVM_Task, 'duration_secs': 0.188276} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.006196] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 856.006703] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7726b65-fff4-48f8-b1b8-c62cd68afb86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.014596] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 856.014596] env[68638]: value = "task-2833691" [ 856.014596] env[68638]: _type = "Task" [ 856.014596] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.026101] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833691, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.044032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.044032] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Instance network_info: |[{"id": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "address": "fa:16:3e:f9:6a:25", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb14533d-1d", "ovs_interfaceid": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 856.044370] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:6a:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb14533d-1dc9-4440-a62d-ab3ca16bc7f1', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.052434] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.052691] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.055771] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0eb9ae0f-b8f3-4715-b2be-5d05e5b21e95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.083429] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833690, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.088761] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.088761] env[68638]: value = "task-2833692" [ 856.088761] env[68638]: _type = "Task" [ 856.088761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.091767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "d2d30008-5058-4be3-b803-00d8ca4450d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.092021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.101193] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833692, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.239909] env[68638]: DEBUG nova.compute.manager [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Received event network-changed-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 856.240196] env[68638]: DEBUG nova.compute.manager [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Refreshing instance network info cache due to event network-changed-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 856.240380] env[68638]: DEBUG oslo_concurrency.lockutils [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] Acquiring lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.240555] env[68638]: DEBUG oslo_concurrency.lockutils [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] Acquired lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.240700] env[68638]: DEBUG nova.network.neutron [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Refreshing network info cache for port fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.291389] env[68638]: DEBUG oslo_concurrency.lockutils [req-9dd5dc70-2136-46e9-8194-38d87aed6f6d req-0fa961a9-4d57-411d-87d2-a98591f459b4 service nova] Releasing lock "refresh_cache-02894a47-59b1-475b-b934-c8d0b6dabc5b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.336147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144298a2-8ee7-432a-9b18-14447812b9bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.345235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9a20b4-1aff-4a08-80d6-8a935dc046cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.377872] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d250f1-dec8-4c38-b881-8f29d77f61f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.387796] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87902d17-121d-4db7-ab06-e39a567a6808 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.403667] env[68638]: DEBUG nova.compute.provider_tree [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.525083] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.583022] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833690, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.602985] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833692, 'name': CreateVM_Task, 'duration_secs': 0.392636} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.605027] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 856.605027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.605027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.605027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 856.605027] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ba799b6-65b2-4dea-8d82-0b85a3f5727c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.610025] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 856.610025] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52aeb81d-8b3f-9234-9bed-da7bbaf694ed" [ 856.610025] env[68638]: _type = "Task" [ 856.610025] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.617679] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aeb81d-8b3f-9234-9bed-da7bbaf694ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.906894] env[68638]: DEBUG nova.scheduler.client.report [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.963086] env[68638]: DEBUG nova.network.neutron [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updated VIF entry in instance network info cache for port fb14533d-1dc9-4440-a62d-ab3ca16bc7f1. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.963468] env[68638]: DEBUG nova.network.neutron [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updating instance_info_cache with network_info: [{"id": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "address": "fa:16:3e:f9:6a:25", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb14533d-1d", "ovs_interfaceid": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.026232] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833691, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.082065] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833690, 'name': CreateSnapshot_Task, 'duration_secs': 1.205175} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.082065] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 857.082584] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8629afd-3ee3-4b1d-aa96-f8620f968f0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.119987] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aeb81d-8b3f-9234-9bed-da7bbaf694ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011379} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.120339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.120574] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.120810] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.120959] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.121156] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.121411] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1de40536-c2e0-42c3-a665-2bd5b4cf040f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.130192] 
env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.130388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.131193] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e42bc426-5074-4ad2-9751-bee92447c5e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.136932] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 857.136932] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c04d-1483-b201-f730-f6b8eeba5d74" [ 857.136932] env[68638]: _type = "Task" [ 857.136932] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.144406] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c04d-1483-b201-f730-f6b8eeba5d74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.412016] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.412554] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 857.415406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 51.599s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.416855] env[68638]: INFO nova.compute.claims [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.465793] env[68638]: DEBUG oslo_concurrency.lockutils [req-ca83ddf3-3fad-4253-b8b8-9338c588bc9e req-fd6031be-eb03-4273-b06c-86770a4883ea service nova] Releasing lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.527098] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833691, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.601694] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 857.602124] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-03743387-3c5c-47fe-b65a-1ffd9b408700 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.611319] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 857.611319] env[68638]: value = "task-2833693" [ 857.611319] env[68638]: _type = "Task" [ 857.611319] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.620212] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833693, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.647329] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c8c04d-1483-b201-f730-f6b8eeba5d74, 'name': SearchDatastore_Task, 'duration_secs': 0.009289} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.648168] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd35bf14-61a3-4627-b8c0-41da29fac9a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.654033] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 857.654033] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522ee5d5-85fc-f9ca-756c-013a6f09635d" [ 857.654033] env[68638]: _type = "Task" [ 857.654033] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.662444] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522ee5d5-85fc-f9ca-756c-013a6f09635d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.921116] env[68638]: DEBUG nova.compute.utils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 857.926783] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 857.926783] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 857.979982] env[68638]: DEBUG nova.policy [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '075b1dab9233409390d346c7bbfa3d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efa342b9d9a34e9e8e708c8f356f905e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 858.029436] env[68638]: DEBUG oslo_vmware.api [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833691, 'name': PowerOnVM_Task, 'duration_secs': 1.573681} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.029695] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.032592] env[68638]: DEBUG nova.compute.manager [None req-56821c37-e240-4fd8-9900-b4c2408c6e99 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.033432] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29718984-55b8-4ccb-a3d3-7c86fb51ac6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.122379] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833693, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.165591] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522ee5d5-85fc-f9ca-756c-013a6f09635d, 'name': SearchDatastore_Task, 'duration_secs': 0.033728} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.165814] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.166092] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193/4c954bb4-6291-47d5-a65c-0ad92a0fd193.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.166351] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6446fb76-8feb-4a75-8fd5-f7efd108d2c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.173493] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 858.173493] env[68638]: value = "task-2833694" [ 858.173493] env[68638]: _type = "Task" [ 858.173493] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.181650] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833694, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.336913] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Successfully created port: 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.426976] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 858.623907] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833693, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.693058] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833694, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511767} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.693058] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193/4c954bb4-6291-47d5-a65c-0ad92a0fd193.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 858.693436] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 858.694011] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8d6db4f5-a42e-4289-96f7-7c30c6841e9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.702915] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 858.702915] env[68638]: value = "task-2833695" [ 858.702915] env[68638]: _type = "Task" [ 858.702915] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.714048] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833695, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.969110] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1023f7-2b01-4e57-8eb1-92553f0ee3a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.976596] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03537100-73eb-4d48-8d21-e0232d90b42c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.012703] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54309a11-399a-44a9-a6c1-a49e50e43856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.020986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7ffa27-dcba-48d5-b5ba-691f99688ae0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.036777] env[68638]: DEBUG nova.compute.provider_tree [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.123773] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833693, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.213368] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833695, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072205} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.213648] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 859.214546] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd949c9-9344-4562-8d9e-a65637cdf921 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.236998] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193/4c954bb4-6291-47d5-a65c-0ad92a0fd193.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 859.237303] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36869c93-2076-4b4a-9691-12f5ffdf9091 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.257333] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 859.257333] env[68638]: value = "task-2833696" [ 859.257333] env[68638]: _type = "Task" [ 859.257333] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.265597] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833696, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.418996] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.419257] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.442363] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 859.464645] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 859.464901] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 859.465094] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 859.465284] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 859.465433] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 859.465582] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 859.465789] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 859.465951] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 859.466197] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 859.466312] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 859.466487] env[68638]: DEBUG nova.virt.hardware [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 859.467340] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e201a44e-3dac-4f42-9763-5b1a444e78a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.475719] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e0669e-c3c0-4d38-96a5-c7d4c3db4571 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.539682] env[68638]: DEBUG nova.scheduler.client.report [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.625731] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833693, 'name': CloneVM_Task, 'duration_secs': 1.681633} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.625999] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Created linked-clone VM from snapshot [ 859.626826] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba03799-a840-41cd-a7ba-53ce55b905d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.634385] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Uploading image e06fa969-493e-41dc-bcf6-86e7dafe726d {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.654842] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.654842] env[68638]: value = "vm-569917" [ 859.654842] env[68638]: _type = "VirtualMachine" [ 859.654842] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.655141] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3a45b1d7-0e24-4825-b03d-20137b20cedd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.664176] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease: (returnval){ [ 859.664176] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52083b2e-dfde-1a81-6de1-5ca81fd51959" [ 859.664176] env[68638]: _type = "HttpNfcLease" [ 859.664176] env[68638]: } obtained for exporting VM: (result){ [ 859.664176] env[68638]: value = "vm-569917" [ 859.664176] env[68638]: _type = "VirtualMachine" [ 859.664176] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.664451] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the lease: (returnval){ [ 859.664451] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52083b2e-dfde-1a81-6de1-5ca81fd51959" [ 859.664451] env[68638]: _type = "HttpNfcLease" [ 859.664451] env[68638]: } to be ready. 
{{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.671060] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.671060] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52083b2e-dfde-1a81-6de1-5ca81fd51959" [ 859.671060] env[68638]: _type = "HttpNfcLease" [ 859.671060] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.772627] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.934185] env[68638]: DEBUG nova.compute.manager [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Received event network-vif-plugged-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 859.934419] env[68638]: DEBUG oslo_concurrency.lockutils [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.934597] env[68638]: DEBUG oslo_concurrency.lockutils [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.934795] env[68638]: DEBUG oslo_concurrency.lockutils [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.934976] env[68638]: DEBUG nova.compute.manager [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] No waiting events found dispatching network-vif-plugged-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 859.935156] env[68638]: WARNING nova.compute.manager [req-fe56da77-ee1a-407e-b1c8-158668594ae0 req-193e2cc8-17c4-4350-b32a-d3c2cc6a0a38 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Received unexpected event network-vif-plugged-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 for instance with vm_state building and task_state spawning. 
[ 860.044659] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.629s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.045290] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 860.048517] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.284s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.051142] env[68638]: INFO nova.compute.claims [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.072420] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Successfully updated port: 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.173651] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 860.173651] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52083b2e-dfde-1a81-6de1-5ca81fd51959" [ 860.173651] env[68638]: _type = "HttpNfcLease" [ 860.173651] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 860.174087] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 860.174087] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52083b2e-dfde-1a81-6de1-5ca81fd51959" [ 860.174087] env[68638]: _type = "HttpNfcLease" [ 860.174087] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 860.174933] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b14fa6c-9e8c-414c-b46d-a51c6f114360 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.182974] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk from lease info. 
{{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 860.183224] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 860.269357] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833696, 'name': ReconfigVM_Task, 'duration_secs': 0.762895} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.269947] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193/4c954bb4-6291-47d5-a65c-0ad92a0fd193.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.270668] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-494911a2-f94f-4a05-a555-b99bb69db302 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.277838] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 860.277838] env[68638]: value = "task-2833698" [ 860.277838] env[68638]: _type = "Task" [ 860.277838] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.288522] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833698, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.364634] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bf5e2ed4-42b8-4ea7-b159-eb16cbf39eb1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.554608] env[68638]: DEBUG nova.compute.utils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.563147] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 860.563147] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 860.578571] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.578736] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.578903] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 860.616148] env[68638]: DEBUG nova.policy [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72b944a0d853408fa82d313bdc7b8bac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '269a5618b37e42189dca254a5073c269', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 860.788676] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833698, 'name': Rename_Task, 'duration_secs': 0.251807} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.789065] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.789683] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f54d8166-471b-4780-823c-108986ff6464 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.798276] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 860.798276] env[68638]: value = "task-2833699" [ 860.798276] env[68638]: _type = "Task" [ 860.798276] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.806969] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.917898] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Successfully created port: 0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.063084] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 861.134786] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 861.311162] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833699, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.341407] env[68638]: DEBUG nova.network.neutron [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating instance_info_cache with network_info: [{"id": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "address": "fa:16:3e:31:46:09", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05aa9a68-9c", "ovs_interfaceid": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.667243] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1af461-f0c6-492a-ac7d-fc3360430737 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.676759] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634ef129-ba59-4d7b-b683-c89c7a67bfe0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.711198] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb7cd04-86b9-4300-b2f5-903757c50718 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.719789] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c64ee5-ae9c-442b-a773-fd84421da502 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.733672] env[68638]: DEBUG nova.compute.provider_tree [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.809721] env[68638]: DEBUG oslo_vmware.api [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2833699, 'name': PowerOnVM_Task, 'duration_secs': 0.726815} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.810054] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.810347] env[68638]: INFO nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Took 8.35 seconds to spawn the instance on the hypervisor. [ 861.810605] env[68638]: DEBUG nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 861.811536] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b2185f-2b49-4afa-8294-0075fb9e3460 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.847164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.847764] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance network_info: |[{"id": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "address": "fa:16:3e:31:46:09", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05aa9a68-9c", "ovs_interfaceid": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 861.848270] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 
14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:46:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.856459] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 861.856780] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 861.857087] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-078ce6b5-4d59-4747-a37d-c32d29281699 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.881017] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.881017] env[68638]: value = "task-2833700" [ 861.881017] env[68638]: _type = "Task" [ 861.881017] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.894011] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833700, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.961733] env[68638]: DEBUG nova.compute.manager [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Received event network-changed-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 861.962745] env[68638]: DEBUG nova.compute.manager [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Refreshing instance network info cache due to event network-changed-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 861.962745] env[68638]: DEBUG oslo_concurrency.lockutils [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] Acquiring lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.964421] env[68638]: DEBUG oslo_concurrency.lockutils [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] Acquired lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.964421] env[68638]: DEBUG nova.network.neutron [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Refreshing network info cache for port 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 862.073810] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 862.101927] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 862.102081] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.102333] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 862.102605] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.102770] env[68638]: DEBUG nova.virt.hardware 
[None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 862.103041] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 862.104055] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 862.104055] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 862.104055] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 862.104055] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 862.104055] env[68638]: DEBUG nova.virt.hardware [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.105881] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71d42e6-8e3e-4349-ae6f-4d0eaaee9ac7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.114714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fc5c18-0022-486e-bffe-68c1fafd23f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.237253] env[68638]: DEBUG nova.scheduler.client.report [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.330760] env[68638]: INFO nova.compute.manager [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Took 65.78 seconds to build instance. [ 862.398028] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833700, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.725213] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Successfully updated port: 0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 862.742476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.743102] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.747156] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.843s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.750557] env[68638]: INFO nova.compute.claims [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.757033] env[68638]: DEBUG nova.network.neutron [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updated VIF entry in instance network info cache for port 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 862.757033] env[68638]: DEBUG nova.network.neutron [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating instance_info_cache with network_info: [{"id": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "address": "fa:16:3e:31:46:09", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05aa9a68-9c", "ovs_interfaceid": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.833171] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a0b70fc-cd12-46f6-b0a1-0cd4b3bc52d3 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.852s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.892690] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833700, 'name': CreateVM_Task, 'duration_secs': 0.894081} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.892881] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 862.893621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.893826] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.894164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 862.894455] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08cbdad7-6c92-4e31-86eb-eeaa37df3f1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.900032] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 862.900032] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526de705-9659-311e-fe05-e550a4d1ca4c" [ 862.900032] env[68638]: _type = "Task" [ 862.900032] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.910852] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526de705-9659-311e-fe05-e550a4d1ca4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.229089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.229265] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.229424] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.248393] env[68638]: DEBUG nova.compute.utils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.249671] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 863.249849] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 863.260896] env[68638]: DEBUG oslo_concurrency.lockutils [req-9e56a7df-7e15-49c6-a89f-62253223f8ec req-9b33cab7-3bda-4341-9874-5127a7bf44dd service nova] Releasing lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.300013] env[68638]: DEBUG nova.policy [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e66d8cfbe6c41bc90baaf1e7eb23a86', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ded98d5a15c54e01b752c52b88549b3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.336496] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 863.412210] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526de705-9659-311e-fe05-e550a4d1ca4c, 'name': SearchDatastore_Task, 'duration_secs': 0.018442} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.412544] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.412783] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 863.413032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.413265] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.413423] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 863.413622] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9115d1a-e91d-48c2-be6b-dee55b6c3adf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.424553] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 863.424757] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 863.425555] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6d98e67-f21c-4160-b276-6c1d922b47b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.431745] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 863.431745] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521dfc68-263b-dc5d-997c-e7ccf89b2337" [ 863.431745] env[68638]: _type = "Task" [ 863.431745] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.451518] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521dfc68-263b-dc5d-997c-e7ccf89b2337, 'name': SearchDatastore_Task, 'duration_secs': 0.016513} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.452407] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86fff4f2-643e-4954-9bac-4a0d6eb7b824 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.458670] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 863.458670] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52acb14c-4eef-330d-b55e-4d456d376cf8" [ 863.458670] env[68638]: _type = "Task" [ 863.458670] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.468178] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52acb14c-4eef-330d-b55e-4d456d376cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.642040] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Successfully created port: 74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.756562] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.775675] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.857104] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.937281] env[68638]: DEBUG nova.network.neutron [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Updating instance_info_cache with network_info: [{"id": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "address": "fa:16:3e:75:d1:21", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7f7cbe-65", "ovs_interfaceid": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.970676] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52acb14c-4eef-330d-b55e-4d456d376cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.017289} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.973342] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.973601] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 863.974364] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-034f0110-4377-4c91-846d-c6282fd791e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.983152] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 863.983152] env[68638]: value = "task-2833701" [ 863.983152] env[68638]: _type = "Task" [ 863.983152] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.993841] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Received event network-vif-plugged-0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 863.994120] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Acquiring lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.994365] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.995024] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.995024] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 
17f6cd0a-bbc1-47c3-9c36-2166ba448de2] No waiting events found dispatching network-vif-plugged-0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.995024] env[68638]: WARNING nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Received unexpected event network-vif-plugged-0c7f7cbe-6520-4959-82ff-39bd93c26cb4 for instance with vm_state building and task_state spawning. [ 863.995208] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Received event network-changed-0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 863.995379] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Refreshing instance network info cache due to event network-changed-0c7f7cbe-6520-4959-82ff-39bd93c26cb4. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 863.995961] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Acquiring lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.000166] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833701, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.284417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9b96c8-0148-44e5-b755-b920e080f29c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.294258] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d80b85e-dfad-4e29-8264-fb343f188008 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.332388] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be25127a-f0c6-4df0-8639-49cd3f42c8a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.343548] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d13275-2e21-4c9e-8811-89c985e9d380 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.361236] env[68638]: DEBUG nova.compute.provider_tree [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.440629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.441027] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Instance network_info: |[{"id": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "address": "fa:16:3e:75:d1:21", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7f7cbe-65", "ovs_interfaceid": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 864.441393] env[68638]: DEBUG 
oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Acquired lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.441705] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Refreshing network info cache for port 0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.443350] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:d1:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33fdc099-7497-41c1-b40c-1558937132d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c7f7cbe-6520-4959-82ff-39bd93c26cb4', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 864.452125] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 864.452834] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 864.453056] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7aa99cda-579b-4a38-90f4-23bb505e5ac7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.477874] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 864.477874] env[68638]: value = "task-2833702" [ 864.477874] env[68638]: _type = "Task" [ 864.477874] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.491602] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833702, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.498162] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833701, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.770430] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.794459] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.794851] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.795115] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.795343] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.795553] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.795774] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 864.796017] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 864.796184] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 864.796374] env[68638]: DEBUG nova.virt.hardware [None 
req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.796618] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.796835] env[68638]: DEBUG nova.virt.hardware [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.797750] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4334b13d-369e-4eb1-bf46-e8f4f20c3fab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.807425] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c3af01-6d19-49f6-834c-4ffd76ccdf7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.865748] env[68638]: DEBUG nova.scheduler.client.report [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.996714] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833702, 'name': CreateVM_Task, 'duration_secs': 0.465893} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.000123] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.000423] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529872} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.001174] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.001808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.001808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 865.002165] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.002594] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.002885] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79d8805c-2ae4-4398-8b16-095d44eb6460 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.004838] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a149c43b-83af-4156-a532-d585a6f5cf77 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.011382] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 865.011382] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525df7dc-2148-2879-8a81-e354ba4eb1f2" [ 865.011382] env[68638]: _type = "Task" [ 865.011382] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.016974] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 865.016974] env[68638]: value = "task-2833703" [ 865.016974] env[68638]: _type = "Task" [ 865.016974] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.024831] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525df7dc-2148-2879-8a81-e354ba4eb1f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.034196] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833703, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.200313] env[68638]: DEBUG nova.compute.manager [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Received event network-vif-plugged-74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 865.200716] env[68638]: DEBUG oslo_concurrency.lockutils [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] Acquiring lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.201193] env[68638]: DEBUG oslo_concurrency.lockutils [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.201584] env[68638]: DEBUG oslo_concurrency.lockutils [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.201584] env[68638]: DEBUG nova.compute.manager [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] No waiting events found dispatching network-vif-plugged-74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.202217] env[68638]: WARNING nova.compute.manager [req-b85148f9-4d23-444a-a533-5ca46e48778e req-422dd24c-9de6-4260-94b4-c41cc5fb6478 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Received unexpected event 
network-vif-plugged-74220954-1e9b-4dd4-a7a3-6a799a426d21 for instance with vm_state building and task_state spawning. [ 865.222463] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Updated VIF entry in instance network info cache for port 0c7f7cbe-6520-4959-82ff-39bd93c26cb4. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.222939] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Updating instance_info_cache with network_info: [{"id": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "address": "fa:16:3e:75:d1:21", "network": {"id": "26f8ee83-eaa2-4da8-8f5d-7de1d08f75ef", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-344816381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "269a5618b37e42189dca254a5073c269", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33fdc099-7497-41c1-b40c-1558937132d4", "external-id": "nsx-vlan-transportzone-764", "segmentation_id": 764, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c7f7cbe-65", "ovs_interfaceid": "0c7f7cbe-6520-4959-82ff-39bd93c26cb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.279241] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Successfully updated port: 74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.372031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.372656] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 865.375255] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.671s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.376021] env[68638]: DEBUG nova.objects.instance [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lazy-loading 'resources' on Instance uuid 96848760-c8a0-43fa-ac7c-e6e56d6d6d83 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.524544] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525df7dc-2148-2879-8a81-e354ba4eb1f2, 'name': SearchDatastore_Task, 'duration_secs': 0.013695} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.525188] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.525425] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.525655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.525802] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.526016] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.526393] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f7857b3c-2841-4ef8-9c99-054d6b2c8484 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.530829] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077973} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.531383] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 865.532174] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b2b690-eea1-43b7-a7e8-76c7fc20d0cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.555346] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 865.556913] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b748ef69-8fc8-4b25-a966-4b154973fbd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.570906] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.571118] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.571837] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c19004ff-3b2e-4f2e-9b4b-9e44aeb4e3fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.578125] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 865.578125] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d629db-9e91-f613-b6fc-0db2316582dc" [ 865.578125] env[68638]: _type = "Task" [ 865.578125] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.579435] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 865.579435] env[68638]: value = "task-2833704" [ 865.579435] env[68638]: _type = "Task" [ 865.579435] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.590835] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833704, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.594132] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d629db-9e91-f613-b6fc-0db2316582dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011443} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.594898] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb16fed1-b8c9-4a88-9600-c7463d029440 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.600328] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 865.600328] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5275e00c-5586-60d2-7587-7b3346295b00" [ 865.600328] env[68638]: _type = "Task" [ 865.600328] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.611048] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5275e00c-5586-60d2-7587-7b3346295b00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.726224] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Releasing lock "refresh_cache-17f6cd0a-bbc1-47c3-9c36-2166ba448de2" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.726575] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Received event network-changed-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 865.726800] env[68638]: DEBUG nova.compute.manager [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Refreshing instance network info cache due to event network-changed-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 865.727473] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Acquiring lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.727473] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Acquired lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.727473] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Refreshing network info cache for port fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.782657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.783046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.783046] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.878382] env[68638]: DEBUG nova.compute.utils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Using /dev/sd instead of None {{(pid=68638) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 865.882077] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 866.095709] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.111930] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5275e00c-5586-60d2-7587-7b3346295b00, 'name': SearchDatastore_Task, 'duration_secs': 0.011984} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.114147] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.114425] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 17f6cd0a-bbc1-47c3-9c36-2166ba448de2/17f6cd0a-bbc1-47c3-9c36-2166ba448de2.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.114923] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9cd53ab-b6a4-4520-a717-c475890e170f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.124344] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 866.124344] env[68638]: value = "task-2833705" [ 866.124344] env[68638]: _type = "Task" [ 866.124344] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.137022] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833705, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.322727] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.359895] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1090277e-c92f-4991-9bc3-c3b284ab3dc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.370540] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9a562c-abdb-4c36-b3cf-f70afbf5a83a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.405112] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 866.413826] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f82acf-ed49-484c-b953-e41ee1dc9b8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.427733] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d33830f-9bbc-4e29-88a8-9090e171f0ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.445118] env[68638]: DEBUG nova.compute.provider_tree [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.563053] env[68638]: DEBUG nova.network.neutron [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Updating instance_info_cache with network_info: [{"id": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "address": "fa:16:3e:64:27:8f", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap74220954-1e", "ovs_interfaceid": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.599618] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833704, 'name': ReconfigVM_Task, 'duration_secs': 0.807383} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.600012] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 866.600867] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92120d93-984c-4d7e-a425-41f72443a37c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.610265] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 866.610265] env[68638]: value = "task-2833706" [ 866.610265] env[68638]: _type = "Task" [ 866.610265] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.621661] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833706, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.628432] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updated VIF entry in instance network info cache for port fb14533d-1dc9-4440-a62d-ab3ca16bc7f1. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 866.628874] env[68638]: DEBUG nova.network.neutron [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updating instance_info_cache with network_info: [{"id": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "address": "fa:16:3e:f9:6a:25", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb14533d-1d", "ovs_interfaceid": "fb14533d-1dc9-4440-a62d-ab3ca16bc7f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.640720] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833705, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.948888] env[68638]: DEBUG nova.scheduler.client.report [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.065587] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.065930] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Instance network_info: |[{"id": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "address": "fa:16:3e:64:27:8f", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74220954-1e", "ovs_interfaceid": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.066403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:27:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74220954-1e9b-4dd4-a7a3-6a799a426d21', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.074098] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4759ae3c-30e1-48d0-9bad-735612af969f 
tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.074339] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.074572] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dccd1d47-a0f7-4ed8-9a6c-931c0f7317b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.095835] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.095835] env[68638]: value = "task-2833707" [ 867.095835] env[68638]: _type = "Task" [ 867.095835] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.104026] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833707, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.120389] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833706, 'name': Rename_Task, 'duration_secs': 0.395504} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.120682] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.120962] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80bdb31e-e32c-4333-a0b7-a2ea5df44234 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.130941] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 867.130941] env[68638]: value = "task-2833708" [ 867.130941] env[68638]: _type = "Task" [ 867.130941] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.134458] env[68638]: DEBUG oslo_concurrency.lockutils [req-4163cc52-90f3-43ba-bb5a-705900e63b49 req-4ed2daf5-75ed-4af8-bd64-a73b15de498d service nova] Releasing lock "refresh_cache-4c954bb4-6291-47d5-a65c-0ad92a0fd193" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.144819] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521254} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.147934] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 17f6cd0a-bbc1-47c3-9c36-2166ba448de2/17f6cd0a-bbc1-47c3-9c36-2166ba448de2.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 867.148188] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 867.148468] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.149205] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef756b96-7591-4b7b-b1b3-708ec593810d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.156799] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 867.156799] env[68638]: value = "task-2833709" [ 867.156799] env[68638]: _type = "Task" [ 867.156799] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.166373] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833709, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.247787] env[68638]: DEBUG nova.compute.manager [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Received event network-changed-74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 867.249175] env[68638]: DEBUG nova.compute.manager [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Refreshing instance network info cache due to event network-changed-74220954-1e9b-4dd4-a7a3-6a799a426d21. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 867.249175] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] Acquiring lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.249175] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] Acquired lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.249175] env[68638]: DEBUG nova.network.neutron [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Refreshing network info cache for port 74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.424286] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 867.449299] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.449620] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.449788] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.449974] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.450142] env[68638]: DEBUG nova.virt.hardware [None 
req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.450292] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.450503] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.450661] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.450828] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.451022] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.451205] env[68638]: DEBUG nova.virt.hardware [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.452095] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec68faa-90ab-4fde-b6b4-6d738ea1ef77 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.455588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.458057] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.069s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.459545] env[68638]: INFO 
nova.compute.claims [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.470384] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f089d2-c5fd-4fa7-abc0-b55ccb94a417 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.485461] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.491182] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Creating folder: Project (4c962835494d41cbb403a4075ba6aabb). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.492310] env[68638]: INFO nova.scheduler.client.report [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Deleted allocations for instance 96848760-c8a0-43fa-ac7c-e6e56d6d6d83 [ 867.493556] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c662b304-6f7c-4a31-8d1f-9705fc40fe85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.507561] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Created folder: Project (4c962835494d41cbb403a4075ba6aabb) in parent group-v569734. [ 867.507771] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Creating folder: Instances. Parent ref: group-v569921. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.508327] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6233e37-c88c-43ad-b558-9849b5801596 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.520893] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Created folder: Instances in parent group-v569921. [ 867.521160] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.521367] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.521581] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6581b19b-e5dc-4238-a133-8b126513a2b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.541204] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.541204] env[68638]: value = "task-2833712" [ 867.541204] env[68638]: _type = "Task" [ 867.541204] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.550462] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833712, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.606943] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833707, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.644856] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833708, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.667569] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833709, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177767} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.667857] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.668812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea291de4-b2fe-4975-9359-797330655634 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.692857] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 17f6cd0a-bbc1-47c3-9c36-2166ba448de2/17f6cd0a-bbc1-47c3-9c36-2166ba448de2.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.693183] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7caf4735-96bd-463f-a5f3-9a5f77956759 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.714566] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 867.714566] env[68638]: value = "task-2833713" [ 867.714566] env[68638]: _type = "Task" [ 867.714566] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.723527] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833713, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.982100] env[68638]: DEBUG nova.network.neutron [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Updated VIF entry in instance network info cache for port 74220954-1e9b-4dd4-a7a3-6a799a426d21. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.982485] env[68638]: DEBUG nova.network.neutron [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Updating instance_info_cache with network_info: [{"id": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "address": "fa:16:3e:64:27:8f", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74220954-1e", "ovs_interfaceid": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.000608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb458d5b-2dd3-48ee-a74b-3b73d80a282e tempest-ServerDiagnosticsNegativeTest-217229775 tempest-ServerDiagnosticsNegativeTest-217229775-project-member] Lock "96848760-c8a0-43fa-ac7c-e6e56d6d6d83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.896s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.053185] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833712, 'name': CreateVM_Task, 'duration_secs': 0.402485} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.053381] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.053857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.054051] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.054399] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.054686] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21819d01-7071-438b-87a4-7d2b0e8bfd31 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.060367] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 868.060367] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524d82ea-cbf0-d394-8826-50f17dae43bc" [ 868.060367] env[68638]: _type = "Task" [ 868.060367] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.071117] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524d82ea-cbf0-d394-8826-50f17dae43bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.107024] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833707, 'name': CreateVM_Task, 'duration_secs': 0.642982} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.107231] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.107962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.147042] env[68638]: DEBUG oslo_vmware.api [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833708, 'name': PowerOnVM_Task, 'duration_secs': 0.923329} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.147335] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.147552] env[68638]: INFO nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Took 8.70 seconds to spawn the instance on the hypervisor. [ 868.147731] env[68638]: DEBUG nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.148578] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc36898-4bd1-4f01-8866-b4ac0da71371 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.225850] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833713, 'name': ReconfigVM_Task, 'duration_secs': 0.432069} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.226176] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 17f6cd0a-bbc1-47c3-9c36-2166ba448de2/17f6cd0a-bbc1-47c3-9c36-2166ba448de2.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.226845] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb6c8e08-3913-41de-9784-fd036a9b1ee0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.234928] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 868.234928] env[68638]: value = "task-2833714" [ 868.234928] env[68638]: _type = "Task" [ 868.234928] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.245074] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833714, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.484989] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fbeb007-f223-4a9d-9949-8fdb0426b58c req-a7ec2828-a384-469c-88d2-ee44f7fb5782 service nova] Releasing lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.574108] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524d82ea-cbf0-d394-8826-50f17dae43bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012138} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.577482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.577752] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.577979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.578149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.578330] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.578822] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.579137] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.579371] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afd58d7a-73ad-4195-996d-bb1c7459249d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.582716] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-190a73c3-7201-4559-ba0a-d820f47e718d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.589057] env[68638]: DEBUG oslo_vmware.api [None 
req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 868.589057] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d3b011-e24d-503b-6ae8-20e30da1327d" [ 868.589057] env[68638]: _type = "Task" [ 868.589057] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.595568] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.595669] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.596449] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f95e1359-f845-4bd8-88a1-1891507ed3d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.601583] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d3b011-e24d-503b-6ae8-20e30da1327d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.605740] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 868.605740] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528641a5-147c-7770-4636-2fd33e370072" [ 868.605740] env[68638]: _type = "Task" [ 868.605740] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.618532] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528641a5-147c-7770-4636-2fd33e370072, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.669678] env[68638]: INFO nova.compute.manager [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Took 66.16 seconds to build instance. [ 868.750411] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833714, 'name': Rename_Task, 'duration_secs': 0.250505} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.750682] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.750926] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-918c3cfe-008c-4439-b414-4c735c1e6d99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.759859] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 868.759859] env[68638]: value = "task-2833715" [ 868.759859] env[68638]: _type = "Task" [ 868.759859] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.778213] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833715, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.017491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cad0efb-dc13-46ce-9787-af15cd5e91f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.026558] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19fb05e-2996-48c7-a8d4-10fc6d6b974b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.061399] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7195e8-5c9f-49e5-89ba-87389a429fa8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.070808] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07f433b-b1eb-4c35-9742-e7173090b9f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.086341] env[68638]: DEBUG nova.compute.provider_tree [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.101648] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 
tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d3b011-e24d-503b-6ae8-20e30da1327d, 'name': SearchDatastore_Task, 'duration_secs': 0.016628} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.101648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.101648] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.101648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.108609] env[68638]: DEBUG nova.compute.manager [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Received event network-changed-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.108804] env[68638]: DEBUG nova.compute.manager [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Refreshing instance network info cache due to event network-changed-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 869.109027] env[68638]: DEBUG oslo_concurrency.lockutils [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] Acquiring lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.109180] env[68638]: DEBUG oslo_concurrency.lockutils [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] Acquired lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.109341] env[68638]: DEBUG nova.network.neutron [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Refreshing network info cache for port 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.122464] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528641a5-147c-7770-4636-2fd33e370072, 'name': SearchDatastore_Task, 'duration_secs': 0.011643} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.124090] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f1ff0b-07dd-469f-a094-909f5f14a317 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.131992] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 869.131992] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520ba25c-b73b-ee89-9635-8bc3f38cb6b5" [ 869.131992] env[68638]: _type = "Task" [ 869.131992] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.142116] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ba25c-b73b-ee89-9635-8bc3f38cb6b5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.172106] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cd20bd2e-5c0f-4605-8112-79d025607269 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.564s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.272455] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833715, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.618461] env[68638]: ERROR nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [req-5f534c3f-ea7e-4598-9bbf-048e6060a8ff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5f534c3f-ea7e-4598-9bbf-048e6060a8ff"}]} [ 869.638907] env[68638]: DEBUG nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 869.648956] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ba25c-b73b-ee89-9635-8bc3f38cb6b5, 'name': SearchDatastore_Task, 'duration_secs': 0.013401} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.648956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.649116] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.649416] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.649542] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.649795] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40993b40-f833-4cab-abdd-91a09d71d547 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.653083] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-647b0551-6357-4bc4-ab23-e747d05d3003 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.658630] env[68638]: DEBUG nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 869.658630] env[68638]: DEBUG nova.compute.provider_tree [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 869.663101] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 869.663101] env[68638]: value = "task-2833716" [ 869.663101] env[68638]: _type = "Task" [ 869.663101] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.663672] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.663900] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.669967] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae94eb61-0cea-4bec-aa4b-0a22b65c4b9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.675618] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.689568] env[68638]: DEBUG nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 869.693319] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 869.693319] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52fc3f61-dcf1-32e5-f9d8-ed4991050de7" [ 869.693319] env[68638]: _type = "Task" [ 869.693319] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.693319] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833716, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.710022] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fc3f61-dcf1-32e5-f9d8-ed4991050de7, 'name': SearchDatastore_Task, 'duration_secs': 0.012554} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.710022] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c735acf-3c4d-4de9-a3d5-12edd32a3aea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.717181] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 869.717181] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522fd689-0bf9-8c30-8e1f-6420ad41c7c4" [ 869.717181] env[68638]: _type = "Task" [ 869.717181] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.721601] env[68638]: DEBUG nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 869.735367] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522fd689-0bf9-8c30-8e1f-6420ad41c7c4, 'name': SearchDatastore_Task, 'duration_secs': 0.011233} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.738026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.738026] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 6200613c-b5de-4774-b0c6-fdb78b4c7267/6200613c-b5de-4774-b0c6-fdb78b4c7267.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.738026] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fad64d16-9fb6-49a0-9432-953d7d11dfcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.749648] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 869.749648] env[68638]: value = "task-2833717" [ 869.749648] env[68638]: _type = "Task" [ 869.749648] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.759881] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.775070] env[68638]: DEBUG oslo_vmware.api [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833715, 'name': PowerOnVM_Task, 'duration_secs': 0.569051} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.775388] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.775566] env[68638]: INFO nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Took 7.70 seconds to spawn the instance on the hypervisor. 
[ 869.775944] env[68638]: DEBUG nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.776623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f7d4ef-82a3-4137-b345-86d2f8bb40b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.053777] env[68638]: DEBUG nova.network.neutron [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updated VIF entry in instance network info cache for port 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.054283] env[68638]: DEBUG nova.network.neutron [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating instance_info_cache with network_info: [{"id": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "address": "fa:16:3e:31:46:09", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05aa9a68-9c", "ovs_interfaceid": "05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.178638] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833716, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.214090] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.265211] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833717, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.300906] env[68638]: INFO nova.compute.manager [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Took 64.50 seconds to build instance. [ 870.317142] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942b8d75-ab2b-40f2-93c3-85698fda6099 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.329855] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a05ce72-3c14-4bea-963f-37aa96db06cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.377047] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a58081-2dbc-478c-9e92-f6ba4a3f96f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.388942] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3735e271-23b1-468c-bf31-02156ce645eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.408281] env[68638]: DEBUG nova.compute.provider_tree [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.559243] env[68638]: DEBUG oslo_concurrency.lockutils [req-ddf97f43-89e1-4212-96ae-d88375bb0158 req-df45181b-a340-4126-ab8c-fcae1158fc3e service nova] Releasing lock "refresh_cache-14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.682011] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833716, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.663954} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.684091] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.685764] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.685764] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88bebccd-c056-456e-ae3d-668af1260eb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.695892] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 870.695892] env[68638]: value = "task-2833718" [ 870.695892] env[68638]: _type = "Task" [ 870.695892] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.711142] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833718, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.762990] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800536} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.763157] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 6200613c-b5de-4774-b0c6-fdb78b4c7267/6200613c-b5de-4774-b0c6-fdb78b4c7267.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.763424] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.764125] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0d3a03d-c953-454b-a06c-4fe685a8cec4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.772858] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 870.772858] env[68638]: value = "task-2833719" [ 870.772858] env[68638]: _type = "Task" [ 870.772858] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.783607] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.804048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-67dcc894-6321-4b80-8297-e70e7f7c842f tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.498s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.882629] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 870.883248] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0bbe38-f189-4535-8cc2-3f7b354f5f68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.890764] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 870.892020] env[68638]: ERROR oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk due to incomplete transfer. [ 870.892020] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6649ca75-b861-4bf7-869f-20b52c26ede9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.900284] env[68638]: DEBUG oslo_vmware.rw_handles [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526feb9e-2a5b-0170-57a5-dbc749326595/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 870.900732] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Uploaded image e06fa969-493e-41dc-bcf6-86e7dafe726d to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 870.903095] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 870.903434] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7b6cb5f1-1c06-4044-93c8-9e81a2e83f2b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.910683] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 870.910683] env[68638]: value = "task-2833720" [ 870.910683] env[68638]: _type = "Task" [ 870.910683] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.926121] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833720, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.951549] env[68638]: DEBUG nova.scheduler.client.report [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 870.951833] env[68638]: DEBUG nova.compute.provider_tree [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 95 to 96 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 870.952027] env[68638]: DEBUG nova.compute.provider_tree [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.209902] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833718, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088841} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.209902] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.209902] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3439feb-03a6-4697-a86c-f82b05a27dc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.230383] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.230871] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad087981-afc9-43c7-a97f-bb707fa4fd1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.255671] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 871.255671] env[68638]: value = "task-2833721" [ 871.255671] env[68638]: _type = "Task" [ 871.255671] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.268799] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.285022] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082638} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.285022] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.285022] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4442d7-32fe-4da0-9518-268dfbe1c200 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.306843] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 6200613c-b5de-4774-b0c6-fdb78b4c7267/6200613c-b5de-4774-b0c6-fdb78b4c7267.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.307628] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.311162] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a2fbbe1-3776-43cd-b4b0-0d1656094a09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.334937] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 871.334937] env[68638]: value = "task-2833722" [ 871.334937] env[68638]: _type = "Task" [ 871.334937] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.345376] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833722, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.422820] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833720, 'name': Destroy_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.458630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.459705] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 871.466253] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 44.344s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.770135] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833721, 'name': ReconfigVM_Task, 'duration_secs': 0.303696} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.770431] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.771209] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e32ce662-5e00-4d3a-b2bb-7182204d5e89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.781701] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 871.781701] env[68638]: value = "task-2833723" [ 871.781701] env[68638]: _type = "Task" [ 871.781701] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.793279] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833723, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.847609] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833722, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.858439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.926040] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833720, 'name': Destroy_Task, 'duration_secs': 0.998199} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.926040] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Destroyed the VM [ 871.926040] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 871.926040] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a939a54-6d09-41b4-990c-f0abc9e76c46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.934894] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 871.934894] env[68638]: value = "task-2833724" [ 871.934894] env[68638]: _type = "Task" [ 871.934894] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.950448] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833724, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.969290] env[68638]: DEBUG nova.compute.utils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.990116] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.990308] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 872.058574] env[68638]: DEBUG nova.policy [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28c09a80775a4919b09d3baae8689650', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd95966c092754deca9ed66c97041235b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 872.293722] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833723, 'name': Rename_Task, 'duration_secs': 0.188569} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.294169] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.294275] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f82abd9-3c36-4155-8b4b-639d7b07b956 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.301653] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 872.301653] env[68638]: value = "task-2833725" [ 872.301653] env[68638]: _type = "Task" [ 872.301653] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.312177] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833725, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.346909] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833722, 'name': ReconfigVM_Task, 'duration_secs': 0.65768} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.348867] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 6200613c-b5de-4774-b0c6-fdb78b4c7267/6200613c-b5de-4774-b0c6-fdb78b4c7267.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.348867] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62d57806-010f-470a-96f8-9253766d7a09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.356289] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 872.356289] env[68638]: value = "task-2833726" [ 872.356289] env[68638]: _type = "Task" [ 872.356289] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.366357] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833726, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.450509] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833724, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.497915] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Applying migration context for instance a09c4492-34fd-4010-b547-bfb5b61f252d as it has an incoming, in-progress migration e1da74ab-012b-46a6-9b56-2cbd2d894fe2. Migration status is reverting {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 872.501569] env[68638]: INFO nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating resource usage from migration e1da74ab-012b-46a6-9b56-2cbd2d894fe2 [ 872.504249] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 872.521476] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Successfully created port: 98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.536713] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c80895d5-1a59-4779-9da9-9aeec10bc395 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.536827] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537018] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7617a7b1-3b21-4d38-b090-1d35bc74637b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537749] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537749] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537749] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537749] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1b176c5d-e77c-410b-b282-b7bba65359a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.537749] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 039edcf8-7908-4be4-8bd3-0b55545b6f7b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538010] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance bb86aabd-129d-4c14-9db1-6676a5e7b9fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538053] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 90c192bd-b823-414c-b793-260eacc9904f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538227] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 809416da-af6c-429d-b4b2-5334768aa744 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538283] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 333d88b6-2182-4e9c-9430-058e67921828 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538599] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 63669b15-2ec8-4a0d-b772-6ef7407e8ebf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538599] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 872.538705] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 27ff37a6-de93-4a4b-904f-a91fdb8b0aff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.538943] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a98f0c63-d327-47b9-b0c2-f7790f1ae87d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 872.539008] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ee752ace-fa19-4fd7-af89-f6628ce3d087 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Migration e1da74ab-012b-46a6-9b56-2cbd2d894fe2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a09c4492-34fd-4010-b547-bfb5b61f252d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4c954bb4-6291-47d5-a65c-0ad92a0fd193 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.539752] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 17f6cd0a-bbc1-47c3-9c36-2166ba448de2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.540084] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 6200613c-b5de-4774-b0c6-fdb78b4c7267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.540084] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c07f6e3a-86cf-4584-aa5e-5adc4bf086e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.540189] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance d49fdd3f-3ad6-4396-811f-67f1ef1f2940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 872.813750] env[68638]: DEBUG oslo_vmware.api [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833725, 'name': PowerOnVM_Task, 'duration_secs': 0.49266} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.817076] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.817076] env[68638]: INFO nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Took 5.39 seconds to spawn the instance on the hypervisor. [ 872.817076] env[68638]: DEBUG nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.817076] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998ff3c5-7039-4282-86fd-c3c51000205d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.869153] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833726, 'name': Rename_Task, 'duration_secs': 0.217815} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.870840] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.872086] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-224d3678-f5cc-43b8-9a81-4dca82f374ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.881076] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 872.881076] env[68638]: value = "task-2833727" [ 872.881076] env[68638]: _type = "Task" [ 872.881076] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.901970] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833727, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.946933] env[68638]: DEBUG oslo_vmware.api [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833724, 'name': RemoveSnapshot_Task, 'duration_secs': 0.868199} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.947230] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 872.947469] env[68638]: INFO nova.compute.manager [None req-15e5511d-288e-41d5-9e7c-d296988ad993 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 17.93 seconds to snapshot the instance on the hypervisor. [ 873.043605] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 92c90438-f7cc-4a48-bfac-f7912709cf88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 873.342204] env[68638]: INFO nova.compute.manager [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Took 56.46 seconds to build instance. 
[ 873.392616] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833727, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.514139] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 873.545764] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 873.545868] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.546206] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 873.546385] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.546605] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 873.546689] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 873.546920] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 
tempest-SecurityGroupsTestJSON-338703272-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 873.549083] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 873.549083] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 873.549083] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 873.549083] env[68638]: DEBUG nova.virt.hardware [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 873.549658] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 3c3fcbca-2477-4037-a978-4b8e9ed0a690 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 873.553496] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bf1486-742a-405a-9359-38f6ce0102c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.563895] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c627465-41c2-4336-8bce-8a672d7f2b91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.799331] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.800766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.800766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.800766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.800766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.802364] env[68638]: INFO nova.compute.manager [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Terminating instance [ 873.844899] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fc4ca751-e17e-47a2-a8bf-82665174a4ac tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.523s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.898036] env[68638]: DEBUG oslo_vmware.api [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833727, 'name': PowerOnVM_Task, 'duration_secs': 0.899882} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.898402] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.898549] env[68638]: INFO nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Took 9.13 seconds to spawn the instance on the hypervisor. [ 873.898711] env[68638]: DEBUG nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.899842] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c85025-7b00-475a-a6c5-b6531929ee51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.038483] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "30193a76-a391-4a64-98cc-7e22dcf7218c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.038903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.059268] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9ba0f737-7947-409c-9163-79d621a29285 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.306608] env[68638]: DEBUG nova.compute.manager [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.306862] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.308257] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ea3133-0e9f-4ad9-958a-696acd4e6af8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.318407] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.318683] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7406024f-ddae-4448-a088-a5bd4e1ab808 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.326946] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 874.326946] env[68638]: value = "task-2833728" [ 874.326946] env[68638]: _type = "Task" [ 874.326946] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.339462] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.348184] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 874.385403] env[68638]: DEBUG nova.compute.manager [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received event network-vif-plugged-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 874.385710] env[68638]: DEBUG oslo_concurrency.lockutils [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] Acquiring lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.386010] env[68638]: DEBUG oslo_concurrency.lockutils [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.386266] env[68638]: DEBUG oslo_concurrency.lockutils [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.386510] env[68638]: DEBUG nova.compute.manager [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] No waiting events found dispatching network-vif-plugged-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 874.386770] env[68638]: WARNING nova.compute.manager [req-3c50dbf3-9e64-4bcf-bb34-f41ab169e9eb req-c9332403-2345-4589-a964-d4566dbc8c00 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received unexpected event network-vif-plugged-98fe1cde-b2f1-4fe7-9f25-74077ad59399 for instance with vm_state building and task_state spawning. [ 874.406228] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Successfully updated port: 98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.419373] env[68638]: INFO nova.compute.manager [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Took 58.67 seconds to build instance. [ 874.561030] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance fd6d5951-f2a1-422d-b137-4d19759f9060 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.838250] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833728, 'name': PowerOffVM_Task, 'duration_secs': 0.236594} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.838394] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.838568] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.838825] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08136ec1-eb7c-4472-8035-81173ed2d22b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.870911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.910612] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.910612] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.910612] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.924349] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4759ae3c-30e1-48d0-9bad-735612af969f tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.972s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.946461] env[68638]: DEBUG 
nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.946820] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.947134] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleting the datastore file [datastore1] 17f6cd0a-bbc1-47c3-9c36-2166ba448de2 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.947512] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa80340f-1cfa-407e-9dbb-79f7a9199efc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.955057] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 874.955057] env[68638]: value = "task-2833730" [ 874.955057] env[68638]: _type = "Task" [ 874.955057] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.964454] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.003222] env[68638]: INFO nova.compute.manager [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Rebuilding instance [ 875.046818] env[68638]: DEBUG nova.compute.manager [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 875.047681] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f875f67f-a91b-47a7-bcc7-ac9b21d90d61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.064475] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9975e756-b571-4e70-ba50-a6001d0b064c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 875.224988] env[68638]: DEBUG oslo_concurrency.lockutils [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Acquiring lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.225137] env[68638]: DEBUG oslo_concurrency.lockutils [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Acquired lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.225304] env[68638]: DEBUG nova.network.neutron [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.426919] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.460533] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.468510] env[68638]: DEBUG oslo_vmware.api [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.295766} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.469485] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.469688] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.469864] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.470211] env[68638]: INFO nova.compute.manager [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 875.470325] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.470514] env[68638]: DEBUG nova.compute.manager [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.470612] env[68638]: DEBUG nova.network.neutron [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.568026] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1bc685aa-4e88-402f-b581-d179706b12a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 875.626667] env[68638]: DEBUG nova.network.neutron [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.927498] env[68638]: DEBUG nova.compute.manager [req-83e605af-ad66-4bdb-8429-6e69e4d432c2 req-6737202d-3819-4e01-b6a2-e8516d7bbd0a service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Received event network-vif-deleted-0c7f7cbe-6520-4959-82ff-39bd93c26cb4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 875.927772] env[68638]: INFO nova.compute.manager [req-83e605af-ad66-4bdb-8429-6e69e4d432c2 req-6737202d-3819-4e01-b6a2-e8516d7bbd0a service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Neutron deleted interface 0c7f7cbe-6520-4959-82ff-39bd93c26cb4; detaching it from the instance and deleting it from the info cache [ 875.927869] env[68638]: DEBUG nova.network.neutron [req-83e605af-ad66-4bdb-8429-6e69e4d432c2 req-6737202d-3819-4e01-b6a2-e8516d7bbd0a service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.962452] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.046485] env[68638]: DEBUG nova.network.neutron [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Updating instance_info_cache with network_info: [{"id": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "address": "fa:16:3e:64:27:8f", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", 
"bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74220954-1e", "ovs_interfaceid": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.062713] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.063041] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c14b5c98-1f93-4f0c-a1bb-15ed16f24789 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.072103] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 876.073414] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 876.073414] env[68638]: value = "task-2833731" [ 876.073414] env[68638]: _type = "Task" [ 876.073414] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.083843] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.129707] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.130115] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Instance network_info: |[{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 876.130522] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:24:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7ab8d568-adb0-4f3b-b6cc-68413e6546ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98fe1cde-b2f1-4fe7-9f25-74077ad59399', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.139394] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.139394] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.139543] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97f549a8-e748-4275-973a-50e27407246c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.162983] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.162983] env[68638]: value = "task-2833732" [ 876.162983] env[68638]: _type = "Task" [ 876.162983] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.172147] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833732, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.213303] env[68638]: DEBUG nova.network.neutron [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.398694] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.399021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.399714] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.399929] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.400122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.403165] env[68638]: INFO nova.compute.manager [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Terminating instance [ 876.434861] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1743a2df-3d6c-474f-a012-1ebabd7f6ce9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.458648] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1664cfc-a4c0-4c28-8a81-72dbf7d945b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.471604] env[68638]: DEBUG nova.compute.manager [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 876.471823] env[68638]: DEBUG nova.compute.manager [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing instance network info cache due to event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 876.472083] env[68638]: DEBUG oslo_concurrency.lockutils [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] Acquiring lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.472361] env[68638]: DEBUG oslo_concurrency.lockutils [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] Acquired lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.472432] env[68638]: DEBUG nova.network.neutron [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.509022] env[68638]: DEBUG nova.compute.manager [req-83e605af-ad66-4bdb-8429-6e69e4d432c2 req-6737202d-3819-4e01-b6a2-e8516d7bbd0a service nova] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Detach interface failed, port_id=0c7f7cbe-6520-4959-82ff-39bd93c26cb4, reason: Instance 17f6cd0a-bbc1-47c3-9c36-2166ba448de2 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 876.551897] env[68638]: DEBUG oslo_concurrency.lockutils [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Releasing lock "refresh_cache-6200613c-b5de-4774-b0c6-fdb78b4c7267" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.552294] env[68638]: DEBUG nova.compute.manager [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Inject network info {{(pid=68638) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 876.552666] env[68638]: DEBUG nova.compute.manager [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] network_info to inject: |[{"id": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "address": "fa:16:3e:64:27:8f", "network": {"id": "3cca37af-f3c4-433b-875a-8e01675c3975", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1292035020-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ded98d5a15c54e01b752c52b88549b3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74220954-1e", "ovs_interfaceid": "74220954-1e9b-4dd4-a7a3-6a799a426d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 876.564366] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Reconfiguring VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 876.564366] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f38da0b0-43d9-4b11-a7ec-e27884b335af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.574846] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance d2d30008-5058-4be3-b803-00d8ca4450d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 876.583628] env[68638]: DEBUG oslo_vmware.api [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Waiting for the task: (returnval){ [ 876.583628] env[68638]: value = "task-2833733" [ 876.583628] env[68638]: _type = "Task" [ 876.583628] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.594657] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833731, 'name': PowerOffVM_Task, 'duration_secs': 0.134979} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.595516] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.596351] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.597602] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62309468-e7f8-49aa-85d6-1b2610372345 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.605796] env[68638]: DEBUG oslo_vmware.api [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Task: {'id': task-2833733, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.611964] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.612072] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcfc2e98-ca5e-435a-b3fc-1f3f543ca2ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.642588] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.642588] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.642800] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Deleting the datastore file [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.642900] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cfe358d-a988-46ac-b3a4-30db4a54bd53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.652177] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 876.652177] env[68638]: value = "task-2833735" [ 876.652177] env[68638]: _type = "Task" [ 876.652177] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.661331] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833735, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.673794] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833732, 'name': CreateVM_Task, 'duration_secs': 0.469068} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.674037] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 876.674715] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.674886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.675266] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 876.675531] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59fdfabd-27f8-4060-9746-a97598d2cc2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.680911] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 876.680911] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528e4858-8565-a98e-b6eb-cf96b0633e29" [ 876.680911] env[68638]: _type = "Task" [ 876.680911] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.690128] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528e4858-8565-a98e-b6eb-cf96b0633e29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.715966] env[68638]: INFO nova.compute.manager [-] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Took 1.25 seconds to deallocate network for instance. [ 876.906882] env[68638]: DEBUG nova.compute.manager [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 876.907080] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.907986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15eb4f50-0a6b-4720-858a-7123507f026f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.916854] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.917156] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ca7ba0d-d5af-42e0-a2bb-313e15642a8c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.926025] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 876.926025] env[68638]: value = "task-2833736" [ 876.926025] env[68638]: _type = "Task" [ 876.926025] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.934347] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833736, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.084873] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 877.085224] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 24 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 877.085382] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5248MB phys_disk=200GB used_disk=23GB total_vcpus=48 used_vcpus=24 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 877.098992] env[68638]: DEBUG oslo_vmware.api [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] Task: {'id': task-2833733, 'name': ReconfigVM_Task, 'duration_secs': 0.18188} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.099339] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-92e6db39-ce92-44c8-89cd-99dbcb39a782 tempest-ServersAdminTestJSON-774851455 tempest-ServersAdminTestJSON-774851455-project-admin] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Reconfigured VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 877.165378] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833735, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116782} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.165712] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.165958] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.166196] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.202036] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528e4858-8565-a98e-b6eb-cf96b0633e29, 'name': SearchDatastore_Task, 'duration_secs': 0.020507} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.202036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.202036] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.202036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.202036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.202036] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.202036] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fafe4312-0b8d-46d7-acdd-e44e2f404c16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.213117] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.213323] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.217069] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a3557ec-910b-48f0-a27e-c08ebfe0c7cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.227893] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 877.227893] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cc1c99-0658-39fa-03b4-301e1ad33440" [ 877.227893] env[68638]: _type = "Task" [ 877.227893] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.235132] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.243178] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cc1c99-0658-39fa-03b4-301e1ad33440, 'name': SearchDatastore_Task, 'duration_secs': 0.011605} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.244169] env[68638]: DEBUG nova.network.neutron [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updated VIF entry in instance network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 877.244547] env[68638]: DEBUG nova.network.neutron [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.246809] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a19884df-402a-4d6a-add8-9d838e7f4d1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.256250] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 877.256250] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52938f6b-8358-ed29-a32c-2181712997f7" [ 877.256250] env[68638]: _type = "Task" [ 877.256250] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.268349] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52938f6b-8358-ed29-a32c-2181712997f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011166} completed successfully. 
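The instance_info_cache update above stores the instance's network_info as a list of VIF entries (port id, MAC address, device name, and the subnets with their fixed IPs). A small illustration of pulling the interesting fields out of one such entry; the literal below is abridged from the logged cache entry for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399.

# Read the key fields out of an (abridged) network_info cache entry.
import json

vif_json = """
[{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399",
  "address": "fa:16:3e:ab:24:ed",
  "type": "ovs",
  "devname": "tap98fe1cde-b2",
  "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.10",
                                    "type": "fixed", "version": 4}]}]}}]
"""

for vif in json.loads(vif_json):
    # Collect every fixed IP across the network's subnets.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)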
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.271123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.271410] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d49fdd3f-3ad6-4396-811f-67f1ef1f2940/d49fdd3f-3ad6-4396-811f-67f1ef1f2940.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.271885] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94857bb7-0c9e-44a1-b0a7-f08b79835bbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.280632] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 877.280632] env[68638]: value = "task-2833737" [ 877.280632] env[68638]: _type = "Task" [ 877.280632] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.291186] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.440545] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833736, 'name': PowerOffVM_Task, 'duration_secs': 0.357408} completed successfully. 
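The CopyVirtualDisk_Task entries above follow the usual oslo.vmware pattern: invoke a vSphere API method that returns a Task managed object, then poll it with wait_for_task until it finishes (the "progress is N%" and "completed successfully" lines). A hedged sketch of that pattern; the vCenter address, credentials, and datacenter reference are placeholders, and error handling is omitted.

# Sketch of the invoke-then-poll pattern behind the CopyVirtualDisk_Task and
# "Waiting for the task ... to complete" entries. Connection details are
# placeholders; constructing the session authenticates against that vCenter.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    "vc.example.test", "administrator", "secret",   # placeholder vCenter login
    api_retry_count=10, task_poll_interval=0.5)

def copy_virtual_disk(source_path, dest_path, dc_ref):
    vim = session.vim
    task = session.invoke_api(
        vim, "CopyVirtualDisk_Task",
        vim.service_content.virtualDiskManager,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destDatacenter=dc_ref)
    # wait_for_task polls the returned Task managed object (the
    # "progress is N%" lines) and raises if the task ends in an error state.
    return session.wait_for_task(task)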
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.440545] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 877.440846] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 877.440846] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8d4fe90-6548-422f-9e64-5724e5eb9e65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.524572] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 877.524829] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 877.525106] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleting the datastore file [datastore1] 63669b15-2ec8-4a0d-b772-6ef7407e8ebf {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 877.525490] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7856a68b-52e1-467b-b513-8f9fb1bfafe1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.534228] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 877.534228] env[68638]: value = "task-2833739" [ 877.534228] env[68638]: _type = "Task" [ 877.534228] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.551361] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833739, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.647132] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9060b71-9669-453d-a1f8-9649d9b2eb1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.654863] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9147be43-025e-4452-85a4-9876f66a196a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.378421] env[68638]: DEBUG oslo_concurrency.lockutils [req-0114e16a-c879-479e-a34a-c6273a2d215d req-54637f93-7a0e-49b7-ae1c-90355ba0414e service nova] Releasing lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.378987] env[68638]: INFO nova.compute.manager [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Rebuilding instance [ 878.387801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77857a8-55bd-4ef5-899e-973fb63ae847 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.396720] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507994} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.400970] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d49fdd3f-3ad6-4396-811f-67f1ef1f2940/d49fdd3f-3ad6-4396-811f-67f1ef1f2940.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.401107] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.401391] env[68638]: DEBUG oslo_vmware.api [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.358059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.403318] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28d1b06e-5b89-4477-a11a-7b0712eb2043 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.405724] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 878.405724] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 878.405829] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.405934] env[68638]: INFO nova.compute.manager [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Took 1.50 seconds to destroy the instance on the hypervisor. [ 878.406180] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.407342] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90559a8-8680-46e0-9f6f-da69bc9bbe6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.411102] env[68638]: DEBUG nova.compute.manager [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 878.414018] env[68638]: DEBUG nova.network.neutron [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.423794] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.430396] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 878.430396] env[68638]: value = "task-2833740" [ 878.430396] env[68638]: _type = "Task" [ 878.430396] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.443410] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833740, 'name': ExtendVirtualDisk_Task} progress is 0%. 
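The "Waiting for function ... _deallocate_network_with_retries to return" entry reflects oslo.service's looping-call helper: a wrapper function is re-run on a fixed interval until it raises LoopingCallDone. A self-contained sketch of that retry idiom follows, with hypothetical helper and exception names standing in for the network-deallocation code.

# Sketch of the oslo.service looping-call retry idiom. The helper and the
# exception are hypothetical stand-ins for the deallocation logic.
from oslo_service import loopingcall

class TransientNetworkError(Exception):      # hypothetical transient failure
    pass

attempts = {"n": 0}

def do_deallocate():                         # hypothetical helper: fails once
    attempts["n"] += 1
    if attempts["n"] < 2:
        raise TransientNetworkError()

def _deallocate_with_retries():
    try:
        do_deallocate()
    except TransientNetworkError:
        return                               # run again on the next interval
    raise loopingcall.LoopingCallDone()      # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1).wait()               # blocks until LoopingCallDone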
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.457362] env[68638]: DEBUG nova.compute.manager [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.458285] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ba92ea-1a8c-4029-a698-cf7ad40b96c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.812400] env[68638]: DEBUG nova.compute.manager [req-556da234-67ca-4cb6-8dec-02ce3d378917 req-46242c25-d04e-4866-bd97-7b28d34207e1 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Received event network-vif-deleted-2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 878.812605] env[68638]: INFO nova.compute.manager [req-556da234-67ca-4cb6-8dec-02ce3d378917 req-46242c25-d04e-4866-bd97-7b28d34207e1 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Neutron deleted interface 2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8; detaching it from the instance and deleting it from the info cache [ 878.812781] env[68638]: DEBUG nova.network.neutron [req-556da234-67ca-4cb6-8dec-02ce3d378917 req-46242c25-d04e-4866-bd97-7b28d34207e1 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.903759] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.904059] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.904244] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.904434] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 
tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.904579] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.904725] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.904946] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.905139] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.905308] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.905468] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.905638] env[68638]: DEBUG nova.virt.hardware [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.906512] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4108c1-673b-405c-af4a-49174a283a12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.918653] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1356705f-5bcb-4e7c-b68c-fd6f5e996784 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.936129] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.940187] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.945371] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.949162] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.949395] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a30e2b8-73b2-481a-b82b-b201a2a7f4ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.968267] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071103} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.971454] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.971727] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.971727] env[68638]: value = "task-2833741" [ 878.971727] env[68638]: _type = "Task" [ 878.971727] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.972652] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332d8c38-5c66-4b48-a2fe-29995e833704 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.985948] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833741, 'name': CreateVM_Task} progress is 5%. 
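The nova.virt.hardware entries a few records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") come from enumerating every (sockets, cores, threads) split of the flavor's vCPU count that stays within the flavor and image limits. The following is a toy re-implementation of that enumeration, not nova.virt.hardware itself; for the one-vCPU m1.nano flavor it reproduces the single 1:1:1 topology reported.

# Toy enumeration of CPU topologies: every (sockets, cores, threads) whose
# product equals the vCPU count and that stays within the given limits.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# One vCPU with the 65536-per-dimension limits from the log yields the single
# topology reported: [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(1, 65536, 65536, 65536))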
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.003444] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] d49fdd3f-3ad6-4396-811f-67f1ef1f2940/d49fdd3f-3ad6-4396-811f-67f1ef1f2940.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.004207] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8205d51-ea09-480c-b145-05f93b4d8280 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.023353] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 879.023353] env[68638]: value = "task-2833742" [ 879.023353] env[68638]: _type = "Task" [ 879.023353] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.039071] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833742, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.215643] env[68638]: DEBUG nova.network.neutron [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.315541] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3dd0067-c957-444f-8922-04e2ca51030a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.326287] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec18ad4f-844c-40ef-92ad-90ab819a4d24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.366813] env[68638]: DEBUG nova.compute.manager [req-556da234-67ca-4cb6-8dec-02ce3d378917 req-46242c25-d04e-4866-bd97-7b28d34207e1 service nova] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Detach interface failed, port_id=2c1b0e86-ed9d-4de7-9bcc-3c49c155a2c8, reason: Instance 63669b15-2ec8-4a0d-b772-6ef7407e8ebf could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 879.447783] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 879.448144] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.982s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.448311] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 41.004s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.452037] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.452037] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 879.473571] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.474223] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72be1032-d438-4f72-b55b-e62d665d6c21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.486676] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833741, 'name': CreateVM_Task, 'duration_secs': 0.294146} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.487947] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.488307] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 879.488307] env[68638]: value = "task-2833743" [ 879.488307] env[68638]: _type = "Task" [ 879.488307] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.488660] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.488812] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.489172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 879.489473] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7385bfb8-1152-41c5-bd61-0a4df23f8bcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.497816] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 879.497816] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528d4f8f-d0cd-932a-79a8-a8b425319c0b" [ 879.497816] env[68638]: _type = "Task" [ 879.497816] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.501508] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.509778] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d4f8f-d0cd-932a-79a8-a8b425319c0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.536331] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833742, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.718609] env[68638]: INFO nova.compute.manager [-] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Took 1.31 seconds to deallocate network for instance. 
[ 879.954517] env[68638]: DEBUG nova.objects.instance [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lazy-loading 'migration_context' on Instance uuid a09c4492-34fd-4010-b547-bfb5b61f252d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.966092] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] There are 42 instances to clean {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 879.966362] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 96848760-c8a0-43fa-ac7c-e6e56d6d6d83] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 880.000099] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833743, 'name': PowerOffVM_Task, 'duration_secs': 0.195727} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.000402] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.000634] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.001424] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3c75b5-992d-43f9-9c8a-a348812ba689 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.013165] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d4f8f-d0cd-932a-79a8-a8b425319c0b, 'name': SearchDatastore_Task, 'duration_secs': 0.011193} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.015183] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.015423] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.015653] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.015799] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.015972] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.016288] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.016499] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-549960cf-c399-4b05-bfec-a21924bc0c1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.018287] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d57057bc-8882-4373-a869-b364ac96b08d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.028564] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.028732] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.032193] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8c9d29f-694d-41a5-8a03-fb9c20bbad73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.034185] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833742, 'name': ReconfigVM_Task, 'duration_secs': 0.673362} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.034435] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Reconfigured VM instance instance-00000044 to attach disk [datastore1] d49fdd3f-3ad6-4396-811f-67f1ef1f2940/d49fdd3f-3ad6-4396-811f-67f1ef1f2940.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.035289] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fab3740a-00d0-4639-8954-6a255ce1904d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.038311] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 880.038311] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f69193-3f29-754d-b2ff-b3c0b99d3d1e" [ 880.038311] env[68638]: _type = "Task" [ 880.038311] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.042136] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 880.042136] env[68638]: value = "task-2833745" [ 880.042136] env[68638]: _type = "Task" [ 880.042136] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.048350] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f69193-3f29-754d-b2ff-b3c0b99d3d1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.053603] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833745, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.088731] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.088992] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.089141] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.089418] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ed51137-bdd1-495c-88db-014928dfc8f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.095999] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 880.095999] env[68638]: value = "task-2833746" [ 880.095999] env[68638]: _type = "Task" [ 880.095999] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.104566] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833746, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.224803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.472138] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 772af0c0-a8dd-4167-87bc-617a9d95b54d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 880.554619] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f69193-3f29-754d-b2ff-b3c0b99d3d1e, 'name': SearchDatastore_Task, 'duration_secs': 0.017389} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.559052] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833745, 'name': Rename_Task, 'duration_secs': 0.157356} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.559052] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56c9a38d-c63a-48e6-8bc6-4410e6f39398 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.561175] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.563625] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da1334e2-4cc5-4ebd-bf2f-5dc9c9ad750c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.569528] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 880.569528] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5212f2a9-f8a4-6d2b-5fed-8269721c768c" [ 880.569528] env[68638]: _type = "Task" [ 880.569528] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.575390] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 880.575390] env[68638]: value = "task-2833747" [ 880.575390] env[68638]: _type = "Task" [ 880.575390] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.588313] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5212f2a9-f8a4-6d2b-5fed-8269721c768c, 'name': SearchDatastore_Task, 'duration_secs': 0.011112} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.591906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.592343] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.594264] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33eefe4e-326b-4abd-99df-5be3640ade6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.598395] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833747, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.604355] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 880.604355] env[68638]: value = "task-2833748" [ 880.604355] env[68638]: _type = "Task" [ 880.604355] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.610036] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152629} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.613607] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.613846] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.614076] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.622934] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.978029] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: da306fdd-a5b4-4275-a482-f77cc008d780] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 881.000014] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a823ab7a-093c-49a4-9a98-c5197f343a33 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.009394] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc8e120-c741-4359-9140-c46273ec6a5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.044703] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c5573b-eca9-44b7-88ab-93a8ab566af9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.054954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b82ae59-b3e3-4291-a1e8-d243ed4839ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.074068] env[68638]: DEBUG nova.compute.provider_tree [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.090512] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833747, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.124148] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833748, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.484169] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a3b06e32-2670-4381-bb91-4597bfcabaa6] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 881.589358] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833747, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.596365] env[68638]: ERROR nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [req-e7d3e0a5-9ae7-4c3d-8f7a-4170ff2c5dcf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e7d3e0a5-9ae7-4c3d-8f7a-4170ff2c5dcf"}]} [ 881.614992] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 881.623970] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526062} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.624300] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.624560] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.627529] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7da48039-b1ad-4a5b-996a-f594059c2df7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.630435] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 881.630688] env[68638]: DEBUG nova.compute.provider_tree [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.640109] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 881.640109] env[68638]: value = "task-2833749" [ 881.640109] env[68638]: _type = "Task" [ 881.640109] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.644673] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 881.653719] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.662662] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 881.662995] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.663210] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.663408] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.663564] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.663707] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 881.663916] env[68638]: 
DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 881.664157] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 881.664335] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 881.664502] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 881.664677] env[68638]: DEBUG nova.virt.hardware [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 881.665553] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb68a62-1f46-4eee-a233-84e7ed85254b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.668848] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 881.676821] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ee5d08-9cd1-4f9a-acb8-5c193a7e24ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.691165] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:29:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9fa307a-55b9-4398-b9a3-75870a0519ca', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.698988] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.701672] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.702109] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be2f1f94-f115-49fc-b61b-8b8b3cacae5a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.724298] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.724298] env[68638]: value = "task-2833750" [ 881.724298] env[68638]: _type = "Task" [ 881.724298] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.735717] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833750, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.987624] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2450602a-fde7-4a65-b7a2-be4195077758] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 882.091683] env[68638]: DEBUG oslo_vmware.api [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833747, 'name': PowerOnVM_Task, 'duration_secs': 1.084463} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.092018] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.092181] env[68638]: INFO nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 882.092362] env[68638]: DEBUG nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.093208] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9631aa-0d22-4648-8391-1a57f629f7d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.148867] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31259178-d972-468b-b9c0-f4cf651f5f76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.154629] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.301297} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.155267] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 882.156014] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60659880-d21f-4c81-8b8f-d2ab9b8f113c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.161214] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fbb604-defc-482d-a6b6-bfd91a0f8fc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.179055] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 882.179560] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78e801f4-c658-405f-82bb-8fbff6b3ae63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.218045] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff9acc9-b2bb-4e14-a71e-ba441ffa4987 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.222848] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 882.222848] env[68638]: value = "task-2833751" [ 882.222848] 
env[68638]: _type = "Task" [ 882.222848] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.231015] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c45048-ea9f-4467-8589-8bed4938938c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.240284] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.251859] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833750, 'name': CreateVM_Task, 'duration_secs': 0.393327} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.252691] env[68638]: DEBUG nova.compute.provider_tree [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.253986] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.254686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.254852] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.255352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.255610] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41986ff1-4655-4294-ba10-7a0421ab9ce2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.260896] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 882.260896] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eaeccd-9add-7478-8259-8a1d8c30462f" [ 882.260896] env[68638]: _type = "Task" [ 882.260896] env[68638]: } to 
complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.270577] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eaeccd-9add-7478-8259-8a1d8c30462f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.491031] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 94a33fcd-69b6-443b-9c86-5129e30b5b0d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 882.612937] env[68638]: INFO nova.compute.manager [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Took 56.24 seconds to build instance. [ 882.735053] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833751, 'name': ReconfigVM_Task, 'duration_secs': 0.268754} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.735596] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3/c07f6e3a-86cf-4584-aa5e-5adc4bf086e3.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.736020] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba5d0c34-f274-413e-a67e-c34a42af874a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.743745] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 882.743745] env[68638]: value = "task-2833752" [ 882.743745] env[68638]: _type = "Task" [ 882.743745] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.752070] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833752, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.757031] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.771246] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eaeccd-9add-7478-8259-8a1d8c30462f, 'name': SearchDatastore_Task, 'duration_secs': 0.011166} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.771246] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.771623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.771745] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.771904] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.772146] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.773012] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8847f5b-6d5e-4448-af72-13a85979b70d {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.783605] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.783802] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.784714] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfe70115-19da-4db3-9be4-9f1ef4b5ec82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.791360] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 882.791360] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5292d6f5-b3d5-4f3d-a228-3055ecf953b1" [ 882.791360] env[68638]: _type = "Task" [ 882.791360] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.800260] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5292d6f5-b3d5-4f3d-a228-3055ecf953b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.994542] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 8992f062-c28f-4ac8-8d0d-0c51c3784e88] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 883.116482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68cbe7f2-4950-4426-80af-2a53228facd5 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.699s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.134312] env[68638]: DEBUG nova.compute.manager [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.134447] env[68638]: DEBUG nova.compute.manager [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing instance network info cache due to event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 883.134661] env[68638]: DEBUG oslo_concurrency.lockutils [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] Acquiring lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.134802] env[68638]: DEBUG oslo_concurrency.lockutils [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] Acquired lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.134970] env[68638]: DEBUG nova.network.neutron [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.254019] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833752, 'name': Rename_Task, 'duration_secs': 0.182472} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.254333] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.254628] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1300f21a-2395-4d30-9622-b11c412b7d53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.266478] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 883.266478] env[68638]: value = "task-2833753" [ 883.266478] env[68638]: _type = "Task" [ 883.266478] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.275953] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833753, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.304702] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5292d6f5-b3d5-4f3d-a228-3055ecf953b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010565} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.305854] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00bb39fd-6624-4982-86c6-1f5a05ece583 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.312183] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 883.312183] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5213a4a8-7780-cd63-bc98-3bc2b767b2c1" [ 883.312183] env[68638]: _type = "Task" [ 883.312183] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.322179] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5213a4a8-7780-cd63-bc98-3bc2b767b2c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.498231] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 5294e1b6-f34f-4f91-aa3e-e0276ad982ee] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 883.618015] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 883.767855] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 4.320s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.773536] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.861s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.774985] env[68638]: INFO nova.compute.claims [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.797712] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833753, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.824134] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5213a4a8-7780-cd63-bc98-3bc2b767b2c1, 'name': SearchDatastore_Task, 'duration_secs': 0.012088} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.824742] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.827050] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.827050] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b76d8b9b-1463-4460-8c56-43215a5eb15e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.835040] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.835040] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.835040] env[68638]: INFO nova.compute.manager [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Rebooting instance [ 883.837374] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 883.837374] env[68638]: value = "task-2833754" [ 883.837374] env[68638]: _type = "Task" [ 883.837374] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.851872] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.888330] env[68638]: DEBUG nova.network.neutron [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updated VIF entry in instance network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.888778] env[68638]: DEBUG nova.network.neutron [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.001664] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 072be237-c51e-43d2-ad84-46122ef9f335] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 884.146231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.287071] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833753, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.352399] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833754, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.361694] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.391710] env[68638]: DEBUG oslo_concurrency.lockutils [req-e5e382d1-4764-4ef7-8de7-b616c8e4b4b0 req-13629144-4154-47e9-b983-08b0ff4efffb service nova] Releasing lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.392290] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquired lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.392561] env[68638]: DEBUG nova.network.neutron [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.505304] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 06a1a44f-35ee-45d2-9503-23468150b72f] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 884.785281] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833753, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.854882] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833754, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553104} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.855171] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.855396] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.855663] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c995b1a3-758d-44cf-acac-b708872ec6ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.867737] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 884.867737] env[68638]: value = "task-2833755" [ 884.867737] env[68638]: _type = "Task" [ 884.867737] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.878425] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833755, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.008593] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: b9736ec5-6332-4202-95d6-a3cd1d1f11d7] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 885.108615] env[68638]: DEBUG nova.network.neutron [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.258346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7606a40-cdbc-4672-991e-fa4c822898c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.266821] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9d12da-4772-48ac-b266-d66033a9a0e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.302720] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232847f6-5b8a-4a07-8d05-5116e1a46218 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.319602] env[68638]: DEBUG oslo_vmware.api [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833753, 'name': PowerOnVM_Task, 'duration_secs': 1.930727} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.320656] env[68638]: INFO nova.compute.manager [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Swapping old allocation on dict_keys(['a03d7c1f-9953-43da-98b9-91e5cea1f9ff']) held by migration e1da74ab-012b-46a6-9b56-2cbd2d894fe2 for instance [ 885.321030] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.321364] env[68638]: DEBUG nova.compute.manager [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 885.322731] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688707c8-bcbb-499a-9fa9-a2a045ad9842 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.329181] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f39dbfb-145d-47ad-9726-525434142a62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.347967] env[68638]: DEBUG nova.compute.provider_tree [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.363056] env[68638]: DEBUG nova.scheduler.client.report [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Overwriting current allocation {'allocations': {'a03d7c1f-9953-43da-98b9-91e5cea1f9ff': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 97}}, 'project_id': 'e92752f6508d4e0eae7e29247444a38f', 'user_id': '34d3f08ca4c44eecb3238404c3728f0c', 'consumer_generation': 1} on consumer a09c4492-34fd-4010-b547-bfb5b61f252d {{(pid=68638) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 885.378099] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833755, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071845} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.378971] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.379771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611c0069-2219-4803-9d1f-0e29d58ac8b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.403188] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.403772] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-749e4b0e-2d7c-42ba-8fa6-51457697a164 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.428602] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 885.428602] env[68638]: value = "task-2833756" [ 885.428602] env[68638]: _type = "Task" [ 885.428602] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.437123] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833756, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.483375] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.483561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquired lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.483778] env[68638]: DEBUG nova.network.neutron [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.520151] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 24982641-40ec-4fab-8385-1bc9dea6ade1] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 885.611717] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Releasing lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.854033] env[68638]: DEBUG nova.scheduler.client.report [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.858988] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.939641] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833756, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.022718] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 20f2c343-1f32-4c36-b4a9-8f009b6ac326] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 886.110892] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.111158] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.111361] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.111538] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.111699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.114012] env[68638]: INFO nova.compute.manager [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Terminating instance [ 886.118902] env[68638]: DEBUG nova.compute.manager [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.118902] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15f9d62-0bbf-4ad2-8fda-0784a20b4429 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.198770] env[68638]: DEBUG nova.network.neutron [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [{"id": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "address": "fa:16:3e:e4:6e:8f", "network": {"id": "98226029-b102-49ba-b879-744ac272533a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "22dd1a47cca5452a966546749e7b8700", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e7e9cd6-7e", "ovs_interfaceid": "0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.360604] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.361167] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.363774] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.700s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.366110] env[68638]: INFO nova.compute.claims [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.455051] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833756, 'name': ReconfigVM_Task, 'duration_secs': 0.677711} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.455366] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.456019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffc91f4f-d5db-4c75-8bce-a4e32dcfff63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.464331] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 886.464331] env[68638]: value = "task-2833757" [ 886.464331] env[68638]: _type = "Task" [ 886.464331] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.472945] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833757, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.525642] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ebd7dd7a-2565-45da-bf7a-b8047c54ebe4] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 886.618482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "refresh_cache-c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.618694] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquired lock "refresh_cache-c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.618861] env[68638]: DEBUG nova.network.neutron [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.701256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Releasing lock "refresh_cache-a09c4492-34fd-4010-b547-bfb5b61f252d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.701748] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.702094] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1d4d7f5-8a4c-434f-a5d1-047e12d9bc80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.711053] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 886.711053] env[68638]: value = "task-2833758" [ 886.711053] env[68638]: _type = "Task" [ 886.711053] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.719973] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833758, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.869933] env[68638]: DEBUG nova.compute.utils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.873566] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.873751] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.911955] env[68638]: DEBUG nova.policy [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2bc01f1cef634944869675af4a80a273', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65b68c82952f43fd8a028fd76de8aafd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.976790] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833757, 'name': Rename_Task, 'duration_secs': 0.140602} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.977084] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.977336] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0a742e8-edcb-4032-b72d-39e89beb7474 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.985310] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 886.985310] env[68638]: value = "task-2833759" [ 886.985310] env[68638]: _type = "Task" [ 886.985310] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.994142] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.029498] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: f43dae1e-3442-450a-b9e8-3884504a2b38] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 887.133202] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34256f49-4a7b-4939-9500-e47de0ddeb1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.141508] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Doing hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 887.142387] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1a689a5c-db20-4ea7-ae00-3f1dc285420e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.144357] env[68638]: DEBUG nova.network.neutron [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.152028] env[68638]: DEBUG oslo_vmware.api [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 887.152028] env[68638]: value = "task-2833760" [ 887.152028] env[68638]: _type = "Task" [ 887.152028] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.160676] env[68638]: DEBUG oslo_vmware.api [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833760, 'name': ResetVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.204090] env[68638]: DEBUG nova.network.neutron [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.207135] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Successfully created port: bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.220730] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833758, 'name': PowerOffVM_Task, 'duration_secs': 0.192582} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.221620] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.222343] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:29:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='a203cce6-fe96-4a10-ad18-80d29521d33f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-493947233',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 887.222563] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.222717] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 887.222900] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.223057] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 887.223211] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 887.223413] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 887.223568] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 887.223735] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 887.223895] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 887.224149] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 887.229948] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd08b2a2-d480-47b6-a39c-25038229650f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.251429] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 887.251429] env[68638]: value = "task-2833761" [ 887.251429] env[68638]: _type = "Task" [ 887.251429] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.264977] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833761, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.377075] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.500591] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833759, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.533082] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 5a28d684-584b-4e13-9910-183119ce5d37] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 887.662708] env[68638]: DEBUG oslo_vmware.api [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833760, 'name': ResetVM_Task, 'duration_secs': 0.105147} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.662953] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Did hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 887.663172] env[68638]: DEBUG nova.compute.manager [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 887.666330] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a187ed6b-f0e7-41d1-b4e6-f9a5ba238d87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.706009] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Releasing lock "refresh_cache-c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.706471] env[68638]: DEBUG nova.compute.manager [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 887.706701] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 887.707820] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03a0199-0e15-4cc1-956e-a4ed44b2d7e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.717973] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.718291] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d798a764-611a-4506-a185-65a5e79e05b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.729030] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 887.729030] env[68638]: value = "task-2833762" [ 887.729030] env[68638]: _type = "Task" [ 887.729030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.738133] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.769934] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833761, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.859380] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1609e2-1052-42c1-9d00-362a60edf25c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.868077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035e6de1-9571-4644-af71-99bff8241bd8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.904616] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8dd31d-d2b2-4d6e-ad6f-7a1aa2e52ccb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.914308] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f231797f-faec-4c2a-aaa0-817cfca6b15a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.930221] env[68638]: DEBUG nova.compute.provider_tree [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.997464] env[68638]: DEBUG oslo_vmware.api [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833759, 'name': PowerOnVM_Task, 'duration_secs': 0.782263} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.997622] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.997812] env[68638]: DEBUG nova.compute.manager [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 887.998731] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83832929-4f85-4b89-b2ac-fd25ead11a01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.037428] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 7b0b6eec-4681-4926-ad3f-5572e022a467] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 888.181555] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e20e9c3b-0d23-4ac2-98a2-60d33e341985 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.347s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.244500] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833762, 'name': PowerOffVM_Task, 'duration_secs': 0.23796} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.245853] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.246099] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.247306] env[68638]: DEBUG nova.compute.manager [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 888.247489] env[68638]: DEBUG nova.compute.manager [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing instance network info cache due to event network-changed-98fe1cde-b2f1-4fe7-9f25-74077ad59399. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 888.247701] env[68638]: DEBUG oslo_concurrency.lockutils [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] Acquiring lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.247844] env[68638]: DEBUG oslo_concurrency.lockutils [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] Acquired lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.248012] env[68638]: DEBUG nova.network.neutron [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Refreshing network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.249673] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc079936-59de-4815-933d-7b8a438ec608 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.267257] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833761, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.282625] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.282972] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.283102] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Deleting the datastore file [datastore1] c07f6e3a-86cf-4584-aa5e-5adc4bf086e3 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.283311] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ee7f4c7-955b-4009-a2fa-a4d4146c96d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.291744] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for the task: (returnval){ [ 888.291744] env[68638]: value = "task-2833764" [ 888.291744] env[68638]: _type = "Task" [ 888.291744] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.300127] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833764, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.410074] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.433111] env[68638]: DEBUG nova.scheduler.client.report [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.444291] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.444571] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.444730] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.444911] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.445119] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.445284] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.445512] env[68638]: DEBUG 
nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.445675] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.445846] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.446018] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.446200] env[68638]: DEBUG nova.virt.hardware [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.447113] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf9fa1b-4bdc-4ece-abf0-91c49716650d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.456605] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083cb9c2-5cc9-4911-a8cf-348c8fddc0ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.517316] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.540395] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: aaf0185b-1a85-4e0e-afb1-55e9e2417d76] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 888.766395] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833761, 'name': ReconfigVM_Task, 'duration_secs': 1.209064} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.767233] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1626af0-adcd-4d97-a84a-4b65dba9942d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.786218] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:29:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='a203cce6-fe96-4a10-ad18-80d29521d33f',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-493947233',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.786867] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.786867] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.786867] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.787022] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.787055] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.787400] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.787400] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.787582] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.787715] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.787892] env[68638]: DEBUG nova.virt.hardware [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.790705] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1cb28f0-3e92-49cc-a1b0-68ab6b501ded {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.801424] env[68638]: DEBUG oslo_vmware.api [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Task: {'id': task-2833764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142966} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.802281] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 888.802496] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 888.802706] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 888.802906] env[68638]: INFO nova.compute.manager [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Took 1.10 seconds to destroy the instance on the hypervisor. [ 888.803295] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.803496] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 888.803496] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5284bb00-450b-8795-e947-dd7fb708beb4" [ 888.803496] env[68638]: _type = "Task" [ 888.803496] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.803762] env[68638]: DEBUG nova.compute.manager [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 888.803869] env[68638]: DEBUG nova.network.neutron [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.816726] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5284bb00-450b-8795-e947-dd7fb708beb4, 'name': SearchDatastore_Task, 'duration_secs': 0.007742} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.822382] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 888.822648] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53f69a46-458e-4ce7-8ffa-b1ac73daff6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.837970] env[68638]: DEBUG nova.network.neutron [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.846789] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 888.846789] env[68638]: value = "task-2833765" [ 888.846789] env[68638]: _type = "Task" [ 888.846789] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.861951] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833765, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.862345] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.862576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.862765] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.862941] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.863120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.867508] env[68638]: INFO nova.compute.manager [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Terminating instance [ 888.904545] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Successfully updated port: bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.939737] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.576s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.940307] env[68638]: DEBUG nova.compute.manager 
[None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.943584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.746s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.945107] env[68638]: INFO nova.compute.claims [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.043577] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4eb4360a-46a8-440b-b300-4724c3497ff2] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 889.048992] env[68638]: DEBUG nova.network.neutron [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updated VIF entry in instance network info cache for port 98fe1cde-b2f1-4fe7-9f25-74077ad59399. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.049397] env[68638]: DEBUG nova.network.neutron [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [{"id": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "address": "fa:16:3e:ab:24:ed", "network": {"id": "6122cb68-111c-4c39-b9c0-ffc1af5bd833", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-363451206-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d95966c092754deca9ed66c97041235b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7ab8d568-adb0-4f3b-b6cc-68413e6546ae", "external-id": "nsx-vlan-transportzone-86", "segmentation_id": 86, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98fe1cde-b2", "ovs_interfaceid": "98fe1cde-b2f1-4fe7-9f25-74077ad59399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.341808] env[68638]: DEBUG nova.network.neutron [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.357882] 
env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833765, 'name': ReconfigVM_Task, 'duration_secs': 0.211709} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.358233] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 889.359175] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92a5b47-03c9-4b57-b729-ca6775783636 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.376654] env[68638]: DEBUG nova.compute.manager [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 889.376654] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 889.384662] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.385697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4155a2a3-ee04-429f-9b30-de1ea425b1a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.388408] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-192cb299-7a9a-4267-86c4-740f0073bcff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.407132] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.407296] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquired lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.407640] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.408773] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.410464] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b271c917-550b-4317-bb9e-31cf7ebbc42c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.412284] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 889.412284] env[68638]: value = "task-2833766" [ 889.412284] env[68638]: _type = "Task" [ 889.412284] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.418555] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 889.418555] env[68638]: value = "task-2833767" [ 889.418555] env[68638]: _type = "Task" [ 889.418555] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.422154] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833766, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.432254] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.451608] env[68638]: DEBUG nova.compute.utils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 889.455673] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.455741] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.499633] env[68638]: DEBUG nova.policy [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdfdcc628e4e40b586b0b71bc0ed5b19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d2c1dcc55dd42c5b791dd8f1841479b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.547413] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ac0141c2-aef6-4edf-913a-d4a41b502c10] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 889.552230] env[68638]: DEBUG oslo_concurrency.lockutils [req-35e096da-1813-4cf4-86b6-8fff159a705b req-86d38346-a289-4a88-8b5c-9113d3c51743 service nova] Releasing lock "refresh_cache-d49fdd3f-3ad6-4396-811f-67f1ef1f2940" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.743399] env[68638]: INFO nova.compute.manager [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Rebuilding instance [ 889.802665] env[68638]: DEBUG nova.compute.manager [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.806544] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b191362f-ef65-4f81-927c-b8a65b8ef33e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.844211] env[68638]: INFO nova.compute.manager [-] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Took 1.04 seconds to deallocate network for instance. 
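The "Waiting for the task: (returnval){ ... } to complete" blocks and the matching "Task: {...} completed successfully" entries that recur throughout this log come from oslo.vmware's task polling, which the vmwareapi driver relies on for every vCenter operation shown here (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task). A minimal sketch of that pattern, assuming an already-connected VMwareAPISession and a VirtualMachine managed-object reference supplied by the caller (both placeholders, not values taken from this log):

from oslo_vmware import api


def power_off_and_wait(session: api.VMwareAPISession, vm_ref):
    """Issue PowerOffVM_Task and block until vCenter reports completion.

    Sketch of the wait_for_task() pattern behind the repeated
    "Waiting for the task ..." / "completed successfully" pairs above.
    """
    # invoke_api() sends the SOAP request (the "Invoking ..._Task with
    # opID=oslo.vmware-..." debug lines) and returns a task reference.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task (the _poll_task "progress is N%" lines)
    # and returns its result on success or raises on error/cancellation.
    return session.wait_for_task(task)

wait_for_task() is what turns the asynchronous vSphere task into a blocking call for the compute thread, which is why each operation in the log is bracketed by a wait entry and a completion entry.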
[ 889.849922] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Successfully created port: 41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.924502] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833766, 'name': ReconfigVM_Task, 'duration_secs': 0.280999} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.928494] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Reconfigured VM instance instance-0000002f to attach disk [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d/a09c4492-34fd-4010-b547-bfb5b61f252d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.929576] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5d4366-1368-402e-852e-529dfa25ef6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.950564] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.955993] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3d5e51-81ef-4685-9224-c880c931da94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.958853] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 889.961451] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833767, 'name': PowerOffVM_Task, 'duration_secs': 0.245904} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.964917] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.964917] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.967603] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b66c41b4-6a3b-4626-abbf-1eb0ec1f10ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.982500] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff671930-5089-496b-b100-2e6bf7e7bcde {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.011629] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d303a968-369b-4725-abf0-4a5ca76597e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.026310] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.029168] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa99d2a6-2dfb-4572-b7f5-51d83eea88ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.035756] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.035987] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.036186] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleting the datastore file [datastore1] d49fdd3f-3ad6-4396-811f-67f1ef1f2940 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.036466] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6376bb92-40cd-45e8-ab33-d68152140d0b {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.041057] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 890.041057] env[68638]: value = "task-2833769" [ 890.041057] env[68638]: _type = "Task" [ 890.041057] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.048529] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 890.048529] env[68638]: value = "task-2833770" [ 890.048529] env[68638]: _type = "Task" [ 890.048529] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.055460] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 1eee31b7-db8b-4765-8cc2-4273717ef86e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 890.057821] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.067350] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.135559] env[68638]: DEBUG nova.network.neutron [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Updating instance_info_cache with network_info: [{"id": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "address": "fa:16:3e:85:30:a3", "network": {"id": "53762239-2606-4571-80da-42157aed6438", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1319565120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b68c82952f43fd8a028fd76de8aafd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbd1ffd1-e9", "ovs_interfaceid": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.273322] env[68638]: DEBUG nova.compute.manager [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Received event network-vif-plugged-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 890.273440] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Acquiring lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.273604] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.273808] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.273965] env[68638]: DEBUG nova.compute.manager [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] No waiting events found dispatching network-vif-plugged-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.274127] env[68638]: WARNING nova.compute.manager [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Received unexpected event network-vif-plugged-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 for instance with vm_state building and task_state spawning. [ 890.274311] env[68638]: DEBUG nova.compute.manager [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Received event network-changed-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 890.274593] env[68638]: DEBUG nova.compute.manager [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Refreshing instance network info cache due to event network-changed-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 890.274629] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Acquiring lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.351870] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.454799] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae53e992-e81c-4091-90f1-ec3126979944 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.467279] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f9c882-49a1-4566-98d3-2d920a99d94d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.501075] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f37835b-b4be-41d1-97af-49bf1bbf7a02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.510395] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a820501c-6820-4558-92b8-486e23bc78b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.526575] env[68638]: DEBUG nova.compute.provider_tree [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 890.556326] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833769, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.561915] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: f40d0f9a-ccd7-4ef0-914f-cc9bcc6507e8] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 890.563701] env[68638]: DEBUG oslo_vmware.api [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236255} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.564160] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.564348] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.564526] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.564703] env[68638]: INFO nova.compute.manager [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Took 1.19 seconds to destroy the instance on the hypervisor. [ 890.564919] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.565202] env[68638]: DEBUG nova.compute.manager [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 890.565279] env[68638]: DEBUG nova.network.neutron [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.638204] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Releasing lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.639254] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Instance network_info: |[{"id": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "address": "fa:16:3e:85:30:a3", "network": {"id": "53762239-2606-4571-80da-42157aed6438", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1319565120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b68c82952f43fd8a028fd76de8aafd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbd1ffd1-e9", "ovs_interfaceid": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.639254] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Acquired lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.639254] env[68638]: DEBUG nova.network.neutron [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Refreshing network info cache for port bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.640677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:30:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'afb671bc-328c-40bf-9c2a-d98695e3d60c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbd1ffd1-e93a-4d6f-941a-ea6d996baf65', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.648880] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Creating folder: Project (65b68c82952f43fd8a028fd76de8aafd). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.652396] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffa9d0be-f441-4e11-b4f7-c2573d21ca25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.665064] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Created folder: Project (65b68c82952f43fd8a028fd76de8aafd) in parent group-v569734. [ 890.665299] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Creating folder: Instances. Parent ref: group-v569927. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.665544] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66ed657a-e563-4a30-83e9-a216d7d3b340 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.676363] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Created folder: Instances in parent group-v569927. [ 890.676363] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.676566] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.676785] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d79f91ae-0b18-4dd4-9a10-ba9a456e1785 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.697119] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.697119] env[68638]: value = "task-2833773" [ 890.697119] env[68638]: _type = "Task" [ 890.697119] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.709341] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833773, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.820291] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.820639] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b34a7323-5068-4577-83a3-b2296a2272a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.830301] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 890.830301] env[68638]: value = "task-2833774" [ 890.830301] env[68638]: _type = "Task" [ 890.830301] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.840630] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833774, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.974286] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 890.983960] env[68638]: DEBUG nova.network.neutron [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Updated VIF entry in instance network info cache for port bbd1ffd1-e93a-4d6f-941a-ea6d996baf65. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 890.984389] env[68638]: DEBUG nova.network.neutron [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Updating instance_info_cache with network_info: [{"id": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "address": "fa:16:3e:85:30:a3", "network": {"id": "53762239-2606-4571-80da-42157aed6438", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1319565120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65b68c82952f43fd8a028fd76de8aafd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afb671bc-328c-40bf-9c2a-d98695e3d60c", "external-id": "nsx-vlan-transportzone-920", "segmentation_id": 920, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbd1ffd1-e9", "ovs_interfaceid": "bbd1ffd1-e93a-4d6f-941a-ea6d996baf65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 
tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 891.016631] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 891.016994] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 891.016994] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 891.017655] env[68638]: DEBUG nova.virt.hardware [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 891.018347] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23886458-ced6-4871-b202-305ffc8ba861 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.034623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cc441e-02a8-4f1e-a183-38118a9ae713 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.057423] env[68638]: ERROR nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [req-b362b688-9b29-404d-951d-f06bd5f8293c] Failed to update 
inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b362b688-9b29-404d-951d-f06bd5f8293c"}]} [ 891.067372] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 168c2937-f8ce-472f-b21f-e48eed909f43] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 891.070052] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833769, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.076725] env[68638]: DEBUG nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 891.101073] env[68638]: DEBUG nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 891.101323] env[68638]: DEBUG nova.compute.provider_tree [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.115857] env[68638]: DEBUG nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: 
None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 891.139981] env[68638]: DEBUG nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 891.211467] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833773, 'name': CreateVM_Task, 'duration_secs': 0.411158} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.211467] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.211467] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.211670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.211932] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 891.212281] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d97aeba4-4460-41ee-8fae-377325ac6c37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.217620] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 891.217620] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525d49f1-daad-729a-8c9e-1c2af16cfebf" [ 891.217620] env[68638]: _type = "Task" [ 891.217620] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.236400] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525d49f1-daad-729a-8c9e-1c2af16cfebf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.349808] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833774, 'name': PowerOffVM_Task, 'duration_secs': 0.248457} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.354372] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.354688] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.355794] env[68638]: DEBUG nova.compute.manager [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Received event network-vif-plugged-41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 891.356007] env[68638]: DEBUG oslo_concurrency.lockutils [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] Acquiring lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.356229] env[68638]: DEBUG oslo_concurrency.lockutils [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.356408] env[68638]: DEBUG oslo_concurrency.lockutils [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.356560] env[68638]: DEBUG nova.compute.manager [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] No waiting events found dispatching network-vif-plugged-41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 891.356741] env[68638]: WARNING nova.compute.manager [req-78e63919-fa7b-417d-9179-316bf1c3b6b9 req-0eb320b4-f5e2-4a87-bf8b-01f25f00fc1a service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Received unexpected event network-vif-plugged-41008c1d-c94d-416f-8c08-9f52170f20c0 for instance with vm_state building and task_state spawning. 
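The external-event entries above (network-vif-plugged / network-changed for the new port, the "<uuid>-events" lock acquire/release triplets, and the "Received unexpected event ... for instance with vm_state building" warning) all pass through a per-instance lock taken via oslo.concurrency. A rough, illustrative sketch of that locking shape, with the event bookkeeping reduced to a plain dict (the real logic lives in nova.compute.manager.InstanceEvents; names below are placeholders):

from oslo_concurrency import lockutils


def pop_instance_event(events_by_instance, instance_uuid, event_key):
    """Illustrative sketch of the '<uuid>-events' locking seen in this log.

    The "Acquiring lock ... by ..." / "acquired" / "released" triplets above
    come from oslo.concurrency's synchronized decorator wrapping a small
    inner function, roughly like this; events_by_instance stands in for
    Nova's real per-instance event table.
    """
    @lockutils.synchronized('%s-events' % instance_uuid)
    def _pop_event():
        # A missing entry is what leads to "No waiting events found
        # dispatching ..." and, for an instance still building/spawning,
        # the "Received unexpected event" warning logged above.
        return events_by_instance.get(instance_uuid, {}).pop(event_key, None)

    return _pop_event()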
[ 891.358483] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0eb030-390b-4f1d-bc56-147e46e29657 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.367751] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.370616] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e82c7769-2ea1-418e-b5ee-c3bd6f9fc306 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.420510] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Successfully updated port: 41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 891.448132] env[68638]: DEBUG nova.network.neutron [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.462481] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.462721] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.462900] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.463210] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bea94591-b9cb-48f7-94f5-415f283e014e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.475379] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 891.475379] env[68638]: value = "task-2833776" [ 891.475379] env[68638]: _type = "Task" [ 891.475379] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.485272] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.486849] env[68638]: DEBUG oslo_concurrency.lockutils [req-54568340-0aa0-4017-a1e5-49ad4bd9660d req-bb70d37b-223f-4148-895d-865d8e26569e service nova] Releasing lock "refresh_cache-92c90438-f7cc-4a48-bfac-f7912709cf88" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.561865] env[68638]: DEBUG oslo_vmware.api [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833769, 'name': PowerOnVM_Task, 'duration_secs': 1.172435} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.562183] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.571282] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: be761cf1-0949-42c0-8a38-58af33113a03] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 891.651598] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6968e86e-b3f8-44cc-9ad3-f6d2eeb5a62f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.659484] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a91462-4fe8-4551-bddc-ad26906cb5e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.692115] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc90755-843d-49f7-8814-da8a832ef7c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.700417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92968873-55d6-4520-a85d-93b2436093c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.714435] env[68638]: DEBUG nova.compute.provider_tree [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.727506] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525d49f1-daad-729a-8c9e-1c2af16cfebf, 'name': SearchDatastore_Task, 'duration_secs': 0.037707} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.727791] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.728067] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.728322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.728595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.728595] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.728871] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c73dab4-a3ce-4a30-a4e7-77fdf0791441 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.738233] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.738452] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.739442] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04c6ee8a-a0cd-4eac-b3b3-989bb782b245 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.744784] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 891.744784] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524f0cce-abba-1a8f-99ff-bb6efa027117" [ 891.744784] env[68638]: _type = "Task" [ 891.744784] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.752928] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524f0cce-abba-1a8f-99ff-bb6efa027117, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.923787] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.924234] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.924234] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.950861] env[68638]: INFO nova.compute.manager [-] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Took 1.39 seconds to deallocate network for instance. [ 891.986285] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.074075] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 8fe9ba7e-021c-4b0f-a9ba-df7a6b753248] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 892.249924] env[68638]: DEBUG nova.scheduler.client.report [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 892.250222] env[68638]: DEBUG nova.compute.provider_tree [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 98 to 99 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 892.250434] env[68638]: DEBUG nova.compute.provider_tree [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.260798] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524f0cce-abba-1a8f-99ff-bb6efa027117, 'name': SearchDatastore_Task, 'duration_secs': 0.020948} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.261610] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f880fe9-2672-408b-bda3-b737fb3d7747 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.268032] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 892.268032] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528c938c-63fe-2a4a-cdf4-dba8dae776e2" [ 892.268032] env[68638]: _type = "Task" [ 892.268032] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.277632] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528c938c-63fe-2a4a-cdf4-dba8dae776e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.300434] env[68638]: DEBUG nova.compute.manager [req-c787ef9e-8a65-4cd2-bdcf-8a8fb442fc24 req-08e47734-ac28-4b3f-9d02-16743b45c2d4 service nova] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Received event network-vif-deleted-98fe1cde-b2f1-4fe7-9f25-74077ad59399 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 892.457216] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.462824] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.489662] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.566176} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.489921] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.490110] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.490285] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.575159] env[68638]: INFO nova.compute.manager [None req-5eb5afe3-aaa5-4d12-a879-6b503374ec2a tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance to original state: 'active' [ 892.578162] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 32efc578-2cf9-4b61-bbaa-aa7031a04e33] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 892.628310] env[68638]: DEBUG nova.network.neutron [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Updating instance_info_cache with network_info: [{"id": "41008c1d-c94d-416f-8c08-9f52170f20c0", "address": "fa:16:3e:29:01:ed", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41008c1d-c9", "ovs_interfaceid": "41008c1d-c94d-416f-8c08-9f52170f20c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.756362] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.813s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.756941] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 892.759476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.065s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.759670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.761828] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.729s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.761924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.763572] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.776s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.765332] env[68638]: INFO nova.compute.claims [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.780493] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528c938c-63fe-2a4a-cdf4-dba8dae776e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.782032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.782752] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 92c90438-f7cc-4a48-bfac-f7912709cf88/92c90438-f7cc-4a48-bfac-f7912709cf88.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.783219] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ed5e25a-3524-4f24-b035-900a0dae885c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.792141] env[68638]: INFO nova.scheduler.client.report [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Deleted allocations for instance a98f0c63-d327-47b9-b0c2-f7790f1ae87d [ 892.794955] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 892.794955] env[68638]: value = "task-2833777" [ 892.794955] env[68638]: _type = "Task" [ 892.794955] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.797703] env[68638]: INFO nova.scheduler.client.report [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted allocations for instance 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7 [ 892.810920] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833777, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.085151] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 53571ad6-1fdb-4651-8b4d-24f35ffc815a] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 893.130831] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.131635] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Instance network_info: |[{"id": "41008c1d-c94d-416f-8c08-9f52170f20c0", "address": "fa:16:3e:29:01:ed", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41008c1d-c9", "ovs_interfaceid": "41008c1d-c94d-416f-8c08-9f52170f20c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 893.133066] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:01:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5fe645c-e088-401e-ab53-4ae2981dea72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41008c1d-c94d-416f-8c08-9f52170f20c0', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.142274] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.142274] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 893.142352] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4b68098-9452-419c-869d-d18991e8448b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.168840] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.168840] env[68638]: value = "task-2833778" [ 893.168840] env[68638]: _type = "Task" [ 893.168840] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.178585] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833778, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.270608] env[68638]: DEBUG nova.compute.utils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.274815] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 893.274927] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.305193] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3fbe9d7a-00ec-44f9-adff-a04a2ca41578 tempest-ServersTestMultiNic-1356929959 tempest-ServersTestMultiNic-1356929959-project-member] Lock "a98f0c63-d327-47b9-b0c2-f7790f1ae87d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.650s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.310373] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2477738-c159-4338-98f8-f21025d12b57 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.366s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.314816] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833777, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.319389] env[68638]: DEBUG nova.policy [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 893.499369] env[68638]: DEBUG nova.compute.manager [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Received event network-changed-41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 893.499602] env[68638]: DEBUG nova.compute.manager [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Refreshing instance network info cache due to event network-changed-41008c1d-c94d-416f-8c08-9f52170f20c0. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 893.499887] env[68638]: DEBUG oslo_concurrency.lockutils [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] Acquiring lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.500078] env[68638]: DEBUG oslo_concurrency.lockutils [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] Acquired lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.500260] env[68638]: DEBUG nova.network.neutron [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Refreshing network info cache for port 41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 893.534192] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 893.534770] env[68638]: DEBUG nova.virt.hardware [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 893.534770] env[68638]: DEBUG nova.virt.hardware [None 
req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 893.538021] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe66c6ea-5b0e-490d-b491-262c2061fb03 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.545296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d77b53-b02a-484c-a5c8-64ce57cd0b0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.562037] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:29:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9fa307a-55b9-4398-b9a3-75870a0519ca', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.569962] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.570463] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 893.570817] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1eb3255-23f9-4dee-b184-a804875b8a85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.597292] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 421c377f-0b7a-457d-b5dd-50281c65122a] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 893.599241] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.599241] env[68638]: value = "task-2833779" [ 893.599241] env[68638]: _type = "Task" [ 893.599241] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.614800] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833779, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.683749] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833778, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.743330] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Successfully created port: 43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.781587] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 893.811864] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524205} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.812636] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 92c90438-f7cc-4a48-bfac-f7912709cf88/92c90438-f7cc-4a48-bfac-f7912709cf88.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.812876] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.813157] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6356f87a-a006-4e44-9f56-29f71531b6bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.826231] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 893.826231] env[68638]: value = "task-2833780" [ 893.826231] env[68638]: _type = "Task" [ 893.826231] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.838530] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833780, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.101700] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 1946baab-bb48-4138-8db6-1f530e432c3d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 894.110031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "a09c4492-34fd-4010-b547-bfb5b61f252d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.110218] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.110435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.110671] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.110875] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.119725] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833779, 'name': CreateVM_Task, 'duration_secs': 0.355069} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.122599] env[68638]: INFO nova.compute.manager [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Terminating instance [ 894.123934] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.124784] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.124977] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.125304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.128550] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da2b35c1-43b0-4c77-abf6-d0725a132646 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.137138] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 894.137138] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52aef047-1d59-5ce8-256d-88f6794e1e2d" [ 894.137138] env[68638]: _type = "Task" [ 894.137138] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.147712] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aef047-1d59-5ce8-256d-88f6794e1e2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.180029] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833778, 'name': CreateVM_Task, 'duration_secs': 0.527877} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.182588] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.184029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.279210] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca1043d-9f3d-43ab-bc4b-168b33cba17f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.291752] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c8f37e-c0ed-4cb0-9fb3-7fcfb74c3d55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.296393] env[68638]: DEBUG nova.network.neutron [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Updated VIF entry in instance network info cache for port 41008c1d-c94d-416f-8c08-9f52170f20c0. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 894.296754] env[68638]: DEBUG nova.network.neutron [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Updating instance_info_cache with network_info: [{"id": "41008c1d-c94d-416f-8c08-9f52170f20c0", "address": "fa:16:3e:29:01:ed", "network": {"id": "b3a6cbc1-a4f3-4ceb-b606-42cab79beecb", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1741978212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d2c1dcc55dd42c5b791dd8f1841479b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5fe645c-e088-401e-ab53-4ae2981dea72", "external-id": "nsx-vlan-transportzone-219", "segmentation_id": 219, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41008c1d-c9", "ovs_interfaceid": "41008c1d-c94d-416f-8c08-9f52170f20c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.328813] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc5ba04-dacd-40ba-986d-434fd84224c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.341848] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c72d00-f529-466b-a5fd-0bef4d0fe75a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.346175] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070283} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.346771] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.347913] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127dea3a-e7cf-4711-a00b-d7d6755df6f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.358448] env[68638]: DEBUG nova.compute.provider_tree [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.381544] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 92c90438-f7cc-4a48-bfac-f7912709cf88/92c90438-f7cc-4a48-bfac-f7912709cf88.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.382549] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78506347-0cb0-40c0-bee7-f475a38b06a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.404721] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 894.404721] env[68638]: value = "task-2833781" [ 894.404721] env[68638]: _type = "Task" [ 894.404721] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.413855] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833781, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.607541] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a5dedd3e-a544-4005-bc9b-0735267d6753] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 894.631111] env[68638]: DEBUG nova.compute.manager [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 894.631355] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.632314] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8c79a6-907a-4c51-9dee-fef077216d53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.645911] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.649527] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20bc0d10-ee8f-465d-a45a-d2a56552ee7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.651498] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52aef047-1d59-5ce8-256d-88f6794e1e2d, 'name': SearchDatastore_Task, 'duration_secs': 0.042569} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.652369] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.652369] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.652536] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.652636] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.652816] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.653491] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.653789] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.654027] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-458ef6bf-d84d-4b7d-9cff-fc84be7ac573 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.656102] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4247bf4-af17-401a-a750-65764ddd607b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.659138] env[68638]: DEBUG 
oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 894.659138] env[68638]: value = "task-2833782" [ 894.659138] env[68638]: _type = "Task" [ 894.659138] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.663994] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 894.663994] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5262ccee-08d2-1779-d454-322072cce80b" [ 894.663994] env[68638]: _type = "Task" [ 894.663994] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.672868] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.672868] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 894.673488] env[68638]: DEBUG oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.674312] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da28df61-b679-4725-97a6-dbc26887ec1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.683916] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5262ccee-08d2-1779-d454-322072cce80b, 'name': SearchDatastore_Task, 'duration_secs': 0.016423} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.685740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.686175] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.686560] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.687170] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 894.687170] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5275a846-3713-67d0-20ec-231aefe432e2" [ 894.687170] env[68638]: _type = "Task" [ 894.687170] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.696839] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5275a846-3713-67d0-20ec-231aefe432e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.799377] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Start spawning the instance on the hypervisor. 
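
The "Acquiring/Acquired/Releasing lock \"[datastore1] devstack-image-cache_base/ef1ae417-...\"" records show each request serializing on a per-image lock before checking whether the cached VMDK already exists on the datastore. A rough standalone sketch of that fetch-if-missing pattern, using a plain in-process lock per cache key rather than the real oslo.concurrency helpers (a real deployment would also need cross-process locking):

    import threading
    from collections import defaultdict

    # One lock per cache key, e.g. "[datastore1] devstack-image-cache_base/<image-id>".
    _locks = defaultdict(threading.Lock)
    _cache = {}  # cache key -> path of the cached disk

    def fetch_image_if_missing(image_id, datastore, download_fn):
        """Return the cached image path, downloading it only once per image."""
        key = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        with _locks[key]:                      # "Acquired lock ..." in the log
            if key not in _cache:              # stands in for the SearchDatastore check
                _cache[key] = download_fn(image_id, datastore)
            return _cache[key]                 # "Releasing lock ..." on exit

    if __name__ == '__main__':
        calls = []
        def fake_download(image_id, datastore):
            calls.append(image_id)
            return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

        path1 = fetch_image_if_missing("ef1ae417", "datastore1", fake_download)
        path2 = fetch_image_if_missing("ef1ae417", "datastore1", fake_download)
        assert path1 == path2 and len(calls) == 1  # downloaded only once
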
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 894.802101] env[68638]: DEBUG oslo_concurrency.lockutils [req-6f3d4b8f-f195-456a-9793-581ac8ccfe91 req-c59700cd-ea3a-4c69-9e04-3106f97f20bf service nova] Releasing lock "refresh_cache-3c3fcbca-2477-4037-a978-4b8e9ed0a690" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.831840] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.832108] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.832272] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.832455] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.832600] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.833058] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.833058] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.833168] env[68638]: DEBUG nova.virt.hardware [None 
req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 894.833332] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.833494] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.833668] env[68638]: DEBUG nova.virt.hardware [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.834714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be27878-2a29-4122-85be-1d83d91ef0bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.843327] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17801ca8-2ea6-44b8-9374-a987f5341d7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.862688] env[68638]: DEBUG nova.scheduler.client.report [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.915231] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833781, 'name': ReconfigVM_Task, 'duration_secs': 0.285409} completed successfully. 
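
The nova.virt.hardware records above walk through picking a guest CPU topology: enumerate every (sockets, cores, threads) combination whose product equals the flavor's vCPU count, discard those exceeding the limits, and sort by preference, which for the 1-vCPU m1.nano flavor leaves only 1:1:1. A simplified sketch of the enumeration step (not Nova's actual implementation):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """All (sockets, cores, threads) triples whose product equals `vcpus`."""
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append((sockets, cores, threads))
        return topologies

    if __name__ == '__main__':
        # For the m1.nano flavor in the log (1 vCPU) only 1:1:1 is possible.
        print(possible_topologies(1))   # [(1, 1, 1)]
        # A 4 vCPU flavor would yield several candidates, e.g. (1, 4, 1), (2, 2, 1), ...
        print(possible_topologies(4))
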
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.915541] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 92c90438-f7cc-4a48-bfac-f7912709cf88/92c90438-f7cc-4a48-bfac-f7912709cf88.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.916550] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9571fc50-e9f7-4fcd-a0b6-3dadad79ca26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.924355] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 894.924355] env[68638]: value = "task-2833783" [ 894.924355] env[68638]: _type = "Task" [ 894.924355] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.933234] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833783, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.112914] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 8f841b29-0156-414e-8467-c9a9393cdae9] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 895.171146] env[68638]: DEBUG oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833782, 'name': PowerOffVM_Task, 'duration_secs': 0.215952} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.171146] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.171421] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 895.171795] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6666c614-c227-442f-a068-d5dcdd615498 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.199782] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5275a846-3713-67d0-20ec-231aefe432e2, 'name': SearchDatastore_Task, 'duration_secs': 0.016702} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.201252] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0309a92-dd61-41ca-bf6c-06e702b9af9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.208098] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 895.208098] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528d7342-a282-8807-9405-27948d0d73de" [ 895.208098] env[68638]: _type = "Task" [ 895.208098] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.215551] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d7342-a282-8807-9405-27948d0d73de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.246298] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.246561] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.246804] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleting the datastore file [datastore2] a09c4492-34fd-4010-b547-bfb5b61f252d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.247021] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32473621-5ee9-43f3-8f80-83a247ab237e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.254406] env[68638]: DEBUG oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 895.254406] env[68638]: value = "task-2833785" [ 895.254406] env[68638]: _type = "Task" [ 895.254406] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.263922] env[68638]: DEBUG oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.367998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.368615] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 895.378298] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.515s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.379888] env[68638]: INFO nova.compute.claims [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 895.400433] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Successfully updated port: 43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.438813] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833783, 'name': Rename_Task, 'duration_secs': 0.166507} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.439127] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.439395] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0dde66-8792-4230-aa89-7b212714df70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.455475] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 895.455475] env[68638]: value = "task-2833786" [ 895.455475] env[68638]: _type = "Task" [ 895.455475] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.464799] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833786, 'name': PowerOnVM_Task} progress is 0%. 
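
The compute_resources lock records nearby (one request reports holding the lock for 2.604s, another reports waiting 31.515s to acquire it) show resource claims being serialized per compute node: each build takes the lock, checks free capacity, and records its claim before the next build proceeds. A toy sketch of that claim-under-lock pattern (the class, fields, and numbers are made up for illustration):

    import threading

    class ResourceTracker:
        """Serialize claims against a single node's free resources."""

        def __init__(self, vcpus, memory_mb):
            self._lock = threading.Lock()     # the "compute_resources" lock
            self.free = {'vcpus': vcpus, 'memory_mb': memory_mb}

        def instance_claim(self, flavor):
            with self._lock:
                if (flavor['vcpus'] > self.free['vcpus'] or
                        flavor['memory_mb'] > self.free['memory_mb']):
                    raise RuntimeError('insufficient resources for claim')
                self.free['vcpus'] -= flavor['vcpus']
                self.free['memory_mb'] -= flavor['memory_mb']
                return dict(flavor)           # "Claim successful on node ..."

    if __name__ == '__main__':
        rt = ResourceTracker(vcpus=48, memory_mb=196590)
        rt.instance_claim({'vcpus': 1, 'memory_mb': 192})   # m1.nano-sized claim
        assert rt.free['vcpus'] == 47
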
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.528274] env[68638]: DEBUG nova.compute.manager [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Received event network-vif-plugged-43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 895.528509] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Acquiring lock "9ba0f737-7947-409c-9163-79d621a29285-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.528719] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Lock "9ba0f737-7947-409c-9163-79d621a29285-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.528951] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Lock "9ba0f737-7947-409c-9163-79d621a29285-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.529230] env[68638]: DEBUG nova.compute.manager [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] No waiting events found dispatching network-vif-plugged-43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 895.529431] env[68638]: WARNING nova.compute.manager [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Received unexpected event network-vif-plugged-43a3db27-15d1-4114-b5f5-63529cba0444 for instance with vm_state building and task_state spawning. [ 895.529585] env[68638]: DEBUG nova.compute.manager [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Received event network-changed-43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 895.529739] env[68638]: DEBUG nova.compute.manager [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Refreshing instance network info cache due to event network-changed-43a3db27-15d1-4114-b5f5-63529cba0444. 
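
The req-678cb554... records show Neutron's network-vif-plugged notification for port 43a3db27-... arriving before anything is waiting for it, so the manager logs the "Received unexpected event" warning instead of waking a waiter. A small standalone sketch of that wait/dispatch registry, using threading.Event in place of Nova's eventlet-based machinery (class and method names are illustrative):

    import threading

    class InstanceEvents:
        """Map (instance_id, event_name) -> Event that a spawning thread waits on."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare_for(self, instance_id, event_name):
            """Called by the thread that will later wait for the event."""
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event_name)] = ev
            return ev

        def pop_event(self, instance_id, event_name):
            """Called when the external notification arrives."""
            with self._lock:
                waiter = self._waiters.pop((instance_id, event_name), None)
            if waiter is None:
                # Matches the WARNING "Received unexpected event ..." in the log.
                print("unexpected event %s for instance %s" % (event_name, instance_id))
                return False
            waiter.set()
            return True

    if __name__ == '__main__':
        events = InstanceEvents()
        # Notification arrives before anyone registered a waiter -> warning path.
        events.pop_event('9ba0f737', 'network-vif-plugged-43a3db27')
        # Normal path: register first, then the notification wakes the waiter.
        waiter = events.prepare_for('9ba0f737', 'network-vif-plugged-43a3db27')
        events.pop_event('9ba0f737', 'network-vif-plugged-43a3db27')
        assert waiter.is_set()
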
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 895.529924] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Acquiring lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.530073] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Acquired lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.530231] env[68638]: DEBUG nova.network.neutron [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Refreshing network info cache for port 43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.616489] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: f767af17-f2bb-461d-9e7f-9c62b5504257] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 895.719871] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528d7342-a282-8807-9405-27948d0d73de, 'name': SearchDatastore_Task, 'duration_secs': 0.009884} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.720360] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.720694] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.721280] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.721574] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.721992] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9df67bde-d06d-4510-9a7b-643e96c62008 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.724550] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe54a20e-3dbb-4152-a658-240c473bb392 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.733333] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 895.733333] env[68638]: value = "task-2833787" [ 895.733333] env[68638]: _type = "Task" [ 895.733333] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.739622] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.739622] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.740116] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe8c473-528d-4787-8dc1-39faf69f0a32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.746695] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833787, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.750714] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 895.750714] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c53f-f747-cee1-5e6f-6cf6b455359f" [ 895.750714] env[68638]: _type = "Task" [ 895.750714] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.763468] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c53f-f747-cee1-5e6f-6cf6b455359f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.770605] env[68638]: DEBUG oslo_vmware.api [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158608} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.771034] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.771344] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.771657] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.771941] env[68638]: INFO nova.compute.manager [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 895.772670] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 895.773588] env[68638]: DEBUG nova.compute.manager [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 895.773775] env[68638]: DEBUG nova.network.neutron [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.887533] env[68638]: DEBUG nova.compute.utils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 895.892759] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Allocating IP information in the background. 
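
Taken together, the req-a90e1a5e... records trace the teardown order for instance a09c4492-...: power off, unregister the VM, delete its datastore directory ("Took 1.14 seconds to destroy the instance on the hypervisor"), then hand off to network deallocation. A condensed sketch of that orchestration with the individual steps stubbed out (the step callables here are placeholders, not the driver's real methods):

    import time

    def destroy_instance(instance, power_off, unregister, delete_datastore_dir,
                         deallocate_network):
        """Tear an instance down in the same order the log records show."""
        start = time.monotonic()
        power_off(instance)                 # PowerOffVM_Task
        unregister(instance)                # UnregisterVM
        delete_datastore_dir(instance)      # FileManager.DeleteDatastoreFile_Task
        elapsed = time.monotonic() - start
        print("Took %.2f seconds to destroy the instance on the hypervisor." % elapsed)
        deallocate_network(instance)        # done after the hypervisor-side cleanup

    if __name__ == '__main__':
        steps = []
        destroy_instance(
            'a09c4492',
            power_off=lambda i: steps.append('power_off'),
            unregister=lambda i: steps.append('unregister'),
            delete_datastore_dir=lambda i: steps.append('delete_dir'),
            deallocate_network=lambda i: steps.append('deallocate'),
        )
        assert steps == ['power_off', 'unregister', 'delete_dir', 'deallocate']
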
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 895.892759] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 895.903908] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.955499] env[68638]: DEBUG nova.policy [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0765e8f9a1d349378b3d2c53c3552787', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '364cb61639c946d59fc35e545e9543c4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 895.972234] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833786, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.115214] env[68638]: DEBUG nova.network.neutron [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.117920] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4b5c5b9e-389d-4ed9-a860-bd41a33fbac4] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 896.247627] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833787, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.257450] env[68638]: DEBUG nova.network.neutron [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.262606] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c53f-f747-cee1-5e6f-6cf6b455359f, 'name': SearchDatastore_Task, 'duration_secs': 0.014631} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.263817] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-187fbd64-5ff7-45bf-ae6a-c1c15192ff7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.270338] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 896.270338] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528e8070-d1c5-0453-1f80-6796a5bab4db" [ 896.270338] env[68638]: _type = "Task" [ 896.270338] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.281714] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528e8070-d1c5-0453-1f80-6796a5bab4db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.397026] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Successfully created port: 5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 896.397026] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 896.476172] env[68638]: DEBUG oslo_vmware.api [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833786, 'name': PowerOnVM_Task, 'duration_secs': 0.622119} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.481434] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.481664] env[68638]: INFO nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Took 8.07 seconds to spawn the instance on the hypervisor. [ 896.481846] env[68638]: DEBUG nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.483205] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0fa93e-6c36-4e65-91bf-325a70429800 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.624607] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ccf76bce-26e2-49e8-9f60-d8f35fe5b3ac] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 896.750976] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520292} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.753833] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.753987] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.754404] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-032be2e4-1848-4533-8e44-36e60a8436c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.766022] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 896.766022] env[68638]: value = "task-2833788" [ 896.766022] env[68638]: _type = "Task" [ 896.766022] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.766022] env[68638]: DEBUG oslo_concurrency.lockutils [req-678cb554-2b18-401c-bfe3-93d9b6ce506d req-34f498d1-d320-4cc7-b40a-e5ff282e3295 service nova] Releasing lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.774192] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.774192] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.786835] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833788, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.791510] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528e8070-d1c5-0453-1f80-6796a5bab4db, 'name': SearchDatastore_Task, 'duration_secs': 0.011654} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.794304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.794680] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 3c3fcbca-2477-4037-a978-4b8e9ed0a690/3c3fcbca-2477-4037-a978-4b8e9ed0a690.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.795487] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-846d4377-3868-4ec0-af21-70f6e0415c19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.803650] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 896.803650] env[68638]: value = "task-2833789" [ 896.803650] env[68638]: _type = "Task" [ 896.803650] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.815797] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833789, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.937023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a07e92b-7369-4998-9d08-1d5f79ab6634 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.944753] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b108b226-3cdf-43ec-afd8-6a22edf8406e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.980062] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb78ee8-1083-4667-aeea-0bdbbdc0df9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.989546] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2908b119-58fb-4bfd-a8fb-05581a276e93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.000279] env[68638]: INFO nova.compute.manager [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Took 58.13 seconds to build instance. [ 897.011259] env[68638]: DEBUG nova.compute.provider_tree [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.044360] env[68638]: DEBUG nova.network.neutron [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.135228] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a5e993de-7aad-4b34-8946-563dc69a6f25] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 897.277053] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833788, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07602} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.279715] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.280702] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ece19c8-61b1-47cc-8669-8bcf9394bb36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.310320] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.310802] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee2269dd-b2e4-46d9-9a87-5b891405d275 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.334918] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833789, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.336352] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 897.336352] env[68638]: value = "task-2833790" [ 897.336352] env[68638]: _type = "Task" [ 897.336352] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.344527] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833790, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.345337] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.412674] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 897.454848] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.455120] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.455285] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.455467] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.455613] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.455758] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.455966] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.456292] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.456878] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.456878] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.456878] env[68638]: DEBUG nova.virt.hardware [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.458905] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43353e6b-d7b5-42db-a3a7-e68e1901badb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.468162] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be2dc49-9feb-456f-98c9-14ac3f1b107d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.513195] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e1f0927-3f7e-484d-a989-9fad9ae966c4 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.180s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.525038] env[68638]: DEBUG nova.network.neutron [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Updating instance_info_cache with network_info: [{"id": "43a3db27-15d1-4114-b5f5-63529cba0444", "address": "fa:16:3e:17:bb:90", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a3db27-15", "ovs_interfaceid": "43a3db27-15d1-4114-b5f5-63529cba0444", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.547241] env[68638]: DEBUG nova.scheduler.client.report [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 897.547621] env[68638]: DEBUG nova.compute.provider_tree [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 99 to 100 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 897.547857] env[68638]: DEBUG nova.compute.provider_tree [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.551429] env[68638]: INFO nova.compute.manager [-] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Took 1.78 seconds to deallocate network for instance. [ 897.560825] env[68638]: DEBUG nova.compute.manager [req-269151ac-e72f-4c3d-a839-c786ebae9772 req-8ad5cbfe-6eda-4acc-a1e0-253f3443ba2f service nova] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Received event network-vif-deleted-0e7e9cd6-7e3c-4f1c-8b65-632742ece1ce {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 897.638525] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: c71693e9-aeaa-4f12-b5cf-a179e558505d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 897.822921] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833789, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.849874] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833790, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.028830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-9ba0f737-7947-409c-9163-79d621a29285" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.029227] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance network_info: |[{"id": "43a3db27-15d1-4114-b5f5-63529cba0444", "address": "fa:16:3e:17:bb:90", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43a3db27-15", "ovs_interfaceid": "43a3db27-15d1-4114-b5f5-63529cba0444", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 898.029728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:bb:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43a3db27-15d1-4114-b5f5-63529cba0444', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.039556] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating folder: Project (2ae89c3992e04141bf24be9d9e84e302). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.040029] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41b776e9-b492-4992-956f-4f67bec841d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.053796] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created folder: Project (2ae89c3992e04141bf24be9d9e84e302) in parent group-v569734. [ 898.053796] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating folder: Instances. Parent ref: group-v569932. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 898.053796] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96e708bb-a1e5-4be1-99eb-c0decc5a9954 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.055922] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.056411] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 898.058935] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.845s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.060726] env[68638]: INFO nova.compute.claims [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.067390] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.073259] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created folder: Instances in parent group-v569932. 
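[editorial sketch] The recurring "Acquiring lock ... by ...", "acquired ... :: waited", and "released ... :: held" DEBUG entries around this point are emitted by oslo_concurrency's lockutils helpers. A minimal sketch of that usage pattern follows, assuming only the public lockutils.synchronized decorator and lockutils.lock context manager; the lock names and the work done under the locks are placeholders, not Nova's code.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources():
    # Placeholder body: Nova's resource tracker updates usage while the
    # "compute_resources" lock is held; lockutils logs the acquire/waited/
    # held/released timings seen in the entries above.
    pass


def refresh_instance_cache(instance_uuid):
    # The same helper also works as a plain context manager; the lock name
    # mirrors the "refresh_cache-<uuid>" names in the log but is illustrative.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the network info cache for the instance


if __name__ == "__main__":
    claim_resources()
    refresh_instance_cache("9ba0f737-7947-409c-9163-79d621a29285")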
[ 898.073500] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 898.073692] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.073903] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a566d08c-4a3b-413a-8d2c-df7db7e9f521 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.106666] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.106666] env[68638]: value = "task-2833793" [ 898.106666] env[68638]: _type = "Task" [ 898.106666] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.109569] env[68638]: DEBUG nova.compute.manager [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Received event network-vif-plugged-5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 898.109960] env[68638]: DEBUG oslo_concurrency.lockutils [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] Acquiring lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.110095] env[68638]: DEBUG oslo_concurrency.lockutils [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.110241] env[68638]: DEBUG oslo_concurrency.lockutils [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.110411] env[68638]: DEBUG nova.compute.manager [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] No waiting events found dispatching network-vif-plugged-5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 898.110577] env[68638]: WARNING nova.compute.manager [req-5161a64d-1ccc-4da8-9b5b-537f3baf22ed req-334ed737-a177-43fc-a346-573f301225e2 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Received unexpected event network-vif-plugged-5c77a676-4e49-4865-adc5-f84b63c42854 for instance with vm_state building and task_state spawning. 
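[editorial sketch] The "Waiting for the task: (returnval){ value = task-... }" blocks and the "Task: {...} progress is N% / completed successfully" entries are produced while oslo_vmware polls vCenter tasks (wait_for_task / _poll_task). Below is a stdlib-only sketch of the shape of that polling loop; it is not oslo_vmware's implementation, and get_task_info() is a stand-in for the real vCenter task-property read.

import time


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reports success or error."""
    while True:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 40}
        state = info["state"]
        if state == "success":
            return info
        if state == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # 'queued' or 'running': report progress and poll again,
        # as the _poll_task entries above do.
        print("Task progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Simulated task that completes after three polls.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "running", "progress": 50},
        {"state": "success", "progress": 100},
    ])
    wait_for_task(lambda: next(states), poll_interval=0.01)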
[ 898.123969] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833793, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.141790] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 14772ba8-bde2-42ef-9a37-df876c8af321] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 898.175332] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Successfully updated port: 5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 898.323384] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833789, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.516464} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.323667] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 3c3fcbca-2477-4037-a978-4b8e9ed0a690/3c3fcbca-2477-4037-a978-4b8e9ed0a690.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 898.323890] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 898.324388] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be656cc7-0398-47e5-9fc1-fff401c044c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.334974] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 898.334974] env[68638]: value = "task-2833794" [ 898.334974] env[68638]: _type = "Task" [ 898.334974] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.352363] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833790, 'name': ReconfigVM_Task, 'duration_secs': 0.967592} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.352662] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.352940] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb/4edaaa5d-535a-4c63-ab44-724548a0f3eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.353620] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f66d0ed1-06f4-4a51-aa45-31dc1be61916 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.362789] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 898.362789] env[68638]: value = "task-2833795" [ 898.362789] env[68638]: _type = "Task" [ 898.362789] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.372624] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833795, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.565451] env[68638]: DEBUG nova.compute.utils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 898.571024] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 898.571024] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 898.618957] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833793, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.639286] env[68638]: DEBUG nova.policy [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b140aa82f044f108521ab8c0d28c0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3e5757d1f74492481048df4a29032ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 898.645859] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e3cf739a-3104-473d-af66-d9974ed1a222] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 898.683545] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.683545] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquired lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.683545] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.846603] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105544} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.846970] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.847947] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20256052-75ff-4b54-9cc9-19dad2ff6cd5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.876638] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 3c3fcbca-2477-4037-a978-4b8e9ed0a690/3c3fcbca-2477-4037-a978-4b8e9ed0a690.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.881580] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7eae8dce-0291-41b6-8ff1-d4b3552676f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.910654] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833795, 'name': Rename_Task, 'duration_secs': 0.258563} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.910654] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.910654] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 898.910654] env[68638]: value = "task-2833796" [ 898.910654] env[68638]: _type = "Task" [ 898.910654] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.910654] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3be3607f-0621-4c9c-a63b-494845545b3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.924462] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833796, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.926159] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 898.926159] env[68638]: value = "task-2833797" [ 898.926159] env[68638]: _type = "Task" [ 898.926159] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.932602] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Successfully created port: cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.938194] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.070278] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 899.119074] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833793, 'name': CreateVM_Task, 'duration_secs': 0.52907} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.123249] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.123249] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.123249] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.123249] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 899.123696] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2398c959-57fa-4dca-a2b8-e5591e781ed8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.129611] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 899.129611] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521ebb76-1347-669e-4011-4e5562d125bc" [ 899.129611] env[68638]: _type = "Task" [ 899.129611] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.139527] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521ebb76-1347-669e-4011-4e5562d125bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.153222] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 6cb1846a-02aa-4dc3-a573-858abf5a0bdf] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 899.257420] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.424670] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833796, 'name': ReconfigVM_Task, 'duration_secs': 0.298424} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.424966] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 3c3fcbca-2477-4037-a978-4b8e9ed0a690/3c3fcbca-2477-4037-a978-4b8e9ed0a690.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.426144] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1d36adf-7d0e-4106-b230-572ab6f278fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.457925] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833797, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.459371] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 899.459371] env[68638]: value = "task-2833798" [ 899.459371] env[68638]: _type = "Task" [ 899.459371] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.471552] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833798, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.570063] env[68638]: DEBUG nova.network.neutron [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Updating instance_info_cache with network_info: [{"id": "5c77a676-4e49-4865-adc5-f84b63c42854", "address": "fa:16:3e:f7:41:c8", "network": {"id": "f7a20f96-7b1f-4e40-91f8-61b4457d43ad", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1873350520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "364cb61639c946d59fc35e545e9543c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c77a676-4e", "ovs_interfaceid": "5c77a676-4e49-4865-adc5-f84b63c42854", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.572286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "92c90438-f7cc-4a48-bfac-f7912709cf88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.572750] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.573085] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.573487] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.573826] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.576210] env[68638]: INFO nova.compute.manager [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Terminating instance [ 899.650063] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521ebb76-1347-669e-4011-4e5562d125bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011108} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.653112] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.653378] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.653621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.653768] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.653951] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.654712] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23ec4833-9ca9-4a1b-987f-564a30ab60c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.658548] env[68638]: DEBUG nova.compute.manager [None 
req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 54af9c38-c8b6-4ef9-be63-de545dcc0da5] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 899.665040] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.665223] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.668995] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c7ffda1-93d4-454b-9849-ebf99ef95891 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.677651] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 899.677651] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cdcd38-f160-9c1c-2778-e9969bf42175" [ 899.677651] env[68638]: _type = "Task" [ 899.677651] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.689820] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cdcd38-f160-9c1c-2778-e9969bf42175, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.715330] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e06037-a7c2-434a-bdd7-0b58a9ab3d9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.726699] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0009457-d727-49e5-b0db-7e09ea084b24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.766722] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793624d9-44f1-44d0-953b-8d0080c8c7d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.776380] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c181caec-2942-4ab9-9518-69581b501c50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.792502] env[68638]: DEBUG nova.compute.provider_tree [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.938488] env[68638]: DEBUG oslo_vmware.api [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833797, 'name': PowerOnVM_Task, 'duration_secs': 0.763296} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.939062] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.939277] env[68638]: DEBUG nova.compute.manager [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.940269] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83296801-99bc-4a7e-a93d-b49f31119763 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.971159] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833798, 'name': Rename_Task, 'duration_secs': 0.177522} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.972060] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.972484] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aa1aefb-c2d8-40e4-b25c-befbb1dbec95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.980925] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 899.980925] env[68638]: value = "task-2833799" [ 899.980925] env[68638]: _type = "Task" [ 899.980925] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.992745] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.076765] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Releasing lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.077220] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Instance network_info: |[{"id": "5c77a676-4e49-4865-adc5-f84b63c42854", "address": "fa:16:3e:f7:41:c8", "network": {"id": "f7a20f96-7b1f-4e40-91f8-61b4457d43ad", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1873350520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "364cb61639c946d59fc35e545e9543c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c77a676-4e", "ovs_interfaceid": "5c77a676-4e49-4865-adc5-f84b63c42854", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 900.077693] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:41:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c77a676-4e49-4865-adc5-f84b63c42854', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 900.085736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Creating folder: Project (364cb61639c946d59fc35e545e9543c4). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 900.086035] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-405be18f-6675-4e50-b2d8-b821e479ba48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.088942] env[68638]: DEBUG nova.compute.manager [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 900.088942] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.089483] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 900.092743] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80b4404-662d-4c4b-97eb-e1b294eb098d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.102909] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.103654] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf7184aa-08e3-46d3-bb5b-73cedec3435a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.107280] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Created folder: Project (364cb61639c946d59fc35e545e9543c4) in parent group-v569734. [ 900.108678] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Creating folder: Instances. Parent ref: group-v569935. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 900.108678] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9b0cca0-e46f-4ffd-bcd5-d60d0d5bc66d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.113207] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 900.113207] env[68638]: value = "task-2833801" [ 900.113207] env[68638]: _type = "Task" [ 900.113207] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.124504] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Created folder: Instances in parent group-v569935. [ 900.124504] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.124504] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 900.124504] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-024367a1-b57d-4720-a797-d2fcba90fe0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.150101] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 900.150101] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 900.150101] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 900.150522] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 900.150522] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 900.150630] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 900.151832] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 900.151832] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 900.151832] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 900.151832] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 900.151832] env[68638]: DEBUG nova.virt.hardware [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 900.152093] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833801, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.153628] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e66872-6b87-40c4-9389-48b29a3f6103 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.159432] env[68638]: DEBUG nova.compute.manager [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Received event network-changed-5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 900.159432] env[68638]: DEBUG nova.compute.manager [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Refreshing instance network info cache due to event network-changed-5c77a676-4e49-4865-adc5-f84b63c42854. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 900.159432] env[68638]: DEBUG oslo_concurrency.lockutils [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] Acquiring lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.159432] env[68638]: DEBUG oslo_concurrency.lockutils [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] Acquired lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.159591] env[68638]: DEBUG nova.network.neutron [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Refreshing network info cache for port 5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 900.165641] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4ce62ccc-70a4-48a9-8acf-3e3ded9bf0cc] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 900.166599] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 900.166599] env[68638]: value = "task-2833803" [ 900.166599] env[68638]: _type = "Task" [ 900.166599] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.178406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105104f8-aa13-410f-93c7-51dd9f50df69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.192092] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833803, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.208238] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cdcd38-f160-9c1c-2778-e9969bf42175, 'name': SearchDatastore_Task, 'duration_secs': 0.011199} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.209108] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-784438d6-b094-4de0-95a3-d6cbe028ac90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.217242] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 900.217242] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e70a8d-717a-5368-2da0-12a5c6799d45" [ 900.217242] env[68638]: _type = "Task" [ 900.217242] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.229635] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e70a8d-717a-5368-2da0-12a5c6799d45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.302745] env[68638]: DEBUG nova.scheduler.client.report [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.468534] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.493826] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833799, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.623447] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833801, 'name': PowerOffVM_Task, 'duration_secs': 0.29785} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.623608] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.623745] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.623995] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19b5611a-aa7c-44ef-a647-f50311fa2f62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.679843] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 05ff9ae9-a0e5-4e21-ab3b-9df8cdb24944] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 900.691979] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833803, 'name': CreateVM_Task, 'duration_secs': 0.396967} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.691979] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.691979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.691979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.691979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.691979] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c4910f7-7a34-48d9-8397-171a5c5b4302 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.703267] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 
tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 900.703267] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520ee086-3824-2043-b17a-448edbd21fa8" [ 900.703267] env[68638]: _type = "Task" [ 900.703267] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.715692] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.716125] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.716429] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Deleting the datastore file [datastore1] 92c90438-f7cc-4a48-bfac-f7912709cf88 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.721418] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1aca3b1e-7a15-4668-ae7a-3d408e9e683d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.731072] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ee086-3824-2043-b17a-448edbd21fa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.739012] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e70a8d-717a-5368-2da0-12a5c6799d45, 'name': SearchDatastore_Task, 'duration_secs': 0.014052} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.743019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.743019] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ba0f737-7947-409c-9163-79d621a29285/9ba0f737-7947-409c-9163-79d621a29285.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.743019] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for the task: (returnval){ [ 900.743019] env[68638]: value = "task-2833805" [ 900.743019] env[68638]: _type = "Task" [ 900.743019] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.743019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a95331aa-3a17-4d3b-852d-48669279e398 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.755017] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 900.755017] env[68638]: value = "task-2833806" [ 900.755017] env[68638]: _type = "Task" [ 900.755017] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.757123] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.764567] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Successfully updated port: cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.771638] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833806, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.810841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.811339] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 900.815127] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.958s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.818969] env[68638]: INFO nova.compute.claims [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.992669] env[68638]: DEBUG oslo_vmware.api [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833799, 'name': PowerOnVM_Task, 'duration_secs': 0.544469} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.993304] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.993533] env[68638]: INFO nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Took 10.02 seconds to spawn the instance on the hypervisor. 
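
The inventory record reported above for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what the resource tracker claims against when it logs "Claim successful". Placement derives the schedulable amount of each resource class as (total - reserved) * allocation_ratio. A minimal sketch of that arithmetic using the exact values from the log; the helper name is illustrative, not a Nova API:

    # Illustrative only: reproduces the capacity arithmetic applied to the
    # inventory shown in the log (capacity = (total - reserved) * allocation_ratio).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):
        """Return how much of each resource class the scheduler may allocate."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(schedulable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

So a single m1.nano instance (1 vCPU, 192 MB) consumes a small fraction of the 192 schedulable VCPUs and ~196 GB of claimable memory shown for this node.
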
[ 900.993714] env[68638]: DEBUG nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.994560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38115d55-a058-4d3b-88ba-8749b3e609fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.999557] env[68638]: DEBUG nova.network.neutron [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Updated VIF entry in instance network info cache for port 5c77a676-4e49-4865-adc5-f84b63c42854. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.999871] env[68638]: DEBUG nova.network.neutron [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Updating instance_info_cache with network_info: [{"id": "5c77a676-4e49-4865-adc5-f84b63c42854", "address": "fa:16:3e:f7:41:c8", "network": {"id": "f7a20f96-7b1f-4e40-91f8-61b4457d43ad", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1873350520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "364cb61639c946d59fc35e545e9543c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c77a676-4e", "ovs_interfaceid": "5c77a676-4e49-4865-adc5-f84b63c42854", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.187014] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.188498] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances with incomplete migration {{(pid=68638) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 901.215036] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ee086-3824-2043-b17a-448edbd21fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.028762} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.215036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.215269] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.215418] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.215617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.215784] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.216107] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ba0a34f-726c-499e-b004-387fa957933d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.233367] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.233562] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.234517] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4208bbb4-6085-4472-b0d7-f8d9826dd004 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.251557] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 901.251557] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5276dc6e-9f73-d86b-aa57-a1305d3da784" [ 901.251557] env[68638]: _type = "Task" [ 901.251557] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.260407] env[68638]: DEBUG oslo_vmware.api [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Task: {'id': task-2833805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342777} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.264148] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.264215] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.264399] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.264586] env[68638]: INFO nova.compute.manager [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Took 1.18 seconds to destroy the instance on the hypervisor. [ 901.264841] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.265063] env[68638]: DEBUG nova.compute.manager [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.265163] env[68638]: DEBUG nova.network.neutron [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.271428] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.271595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.271742] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.273164] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5276dc6e-9f73-d86b-aa57-a1305d3da784, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.281487] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833806, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.322669] env[68638]: DEBUG nova.compute.utils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.327102] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.327296] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.408994] env[68638]: DEBUG nova.policy [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37bdf0a194a047dbbcbaf2c34324b3bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3f84fb9b3b0442d89b45cc44b0eda16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 901.508477] env[68638]: DEBUG oslo_concurrency.lockutils [req-e74d2126-0c52-49ac-b7eb-f33ced943f03 req-4f24c00b-bfc2-4b1a-b144-7a8fa20a1ab8 service nova] Releasing lock "refresh_cache-fd6d5951-f2a1-422d-b137-4d19759f9060" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.524972] env[68638]: INFO nova.compute.manager [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Took 52.88 seconds to build instance. 
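
The instance_info_cache update above prints the full network_info for port 5c77a676-4e49-4865-adc5-f84b63c42854: a list of VIF dicts, each carrying a MAC address, an OVS devname, NSX binding details, and a nested network with its subnets and fixed IPs. A small sketch of pulling the commonly needed fields out of that shape; the structure is assumed to match what the log prints, and this is not a Nova helper:

    import json

    # network_info as logged: a list of VIF dicts (trimmed to the fields used below).
    network_info = json.loads("""
    [{"id": "5c77a676-4e49-4865-adc5-f84b63c42854",
      "address": "fa:16:3e:f7:41:c8",
      "devname": "tap5c77a676-4e",
      "details": {"segmentation_id": 493,
                  "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb"},
      "network": {"id": "f7a20f96-7b1f-4e40-91f8-61b4457d43ad",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.3",
                                        "type": "fixed",
                                        "version": 4}]}]}}]
    """)

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], vif["devname"],
              vif["details"]["segmentation_id"], fixed_ips)
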
[ 901.700665] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.706136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "cd27220d-c706-4450-a01b-c871c608056f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.706136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.729637] env[68638]: DEBUG nova.compute.manager [req-062e1659-8906-4fed-9db0-5f9b2a2c8a1e req-a1bc08b0-c89e-417d-8115-51ac21b4d18c service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Received event network-vif-deleted-bbd1ffd1-e93a-4d6f-941a-ea6d996baf65 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 901.729985] env[68638]: INFO nova.compute.manager [req-062e1659-8906-4fed-9db0-5f9b2a2c8a1e req-a1bc08b0-c89e-417d-8115-51ac21b4d18c service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Neutron deleted interface bbd1ffd1-e93a-4d6f-941a-ea6d996baf65; detaching it from the instance and deleting it from the info cache [ 901.729985] env[68638]: DEBUG nova.network.neutron [req-062e1659-8906-4fed-9db0-5f9b2a2c8a1e req-a1bc08b0-c89e-417d-8115-51ac21b4d18c service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.769692] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5276dc6e-9f73-d86b-aa57-a1305d3da784, 'name': SearchDatastore_Task, 'duration_secs': 0.054763} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.771065] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f077d707-e841-460e-bdba-9221fdefcd3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.779428] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790287} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.780616] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ba0f737-7947-409c-9163-79d621a29285/9ba0f737-7947-409c-9163-79d621a29285.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.782036] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.782036] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3299a253-991f-45a1-a6a6-262a4fa50c67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.784918] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 901.784918] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52084fb3-8ad5-d194-4d33-1176b8ca28ce" [ 901.784918] env[68638]: _type = "Task" [ 901.784918] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.790506] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 901.790506] env[68638]: value = "task-2833807" [ 901.790506] env[68638]: _type = "Task" [ 901.790506] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.798433] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52084fb3-8ad5-d194-4d33-1176b8ca28ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.805203] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.818641] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.834631] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 901.838811] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Successfully created port: 99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.014242] env[68638]: DEBUG nova.network.neutron [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Updating instance_info_cache with network_info: [{"id": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "address": "fa:16:3e:97:f7:1f", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8deeee-81", "ovs_interfaceid": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.029915] env[68638]: DEBUG oslo_concurrency.lockutils [None req-10670051-a584-4cf2-9cc6-286db57d9218 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.687s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.140101] env[68638]: DEBUG nova.network.neutron [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.210660] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.237425] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d434fc30-a746-4a3a-b3db-d0490c3a5f68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.251588] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7e37c9-0fd1-4dd3-88d6-ffc26fc29368 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.300675] env[68638]: DEBUG nova.compute.manager [req-062e1659-8906-4fed-9db0-5f9b2a2c8a1e req-a1bc08b0-c89e-417d-8115-51ac21b4d18c service nova] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Detach interface failed, port_id=bbd1ffd1-e93a-4d6f-941a-ea6d996baf65, reason: Instance 92c90438-f7cc-4a48-bfac-f7912709cf88 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 902.308957] env[68638]: DEBUG nova.compute.manager [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Received event network-vif-plugged-cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 902.309215] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Acquiring lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.309443] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.309619] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.309903] env[68638]: DEBUG nova.compute.manager [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] No waiting events found dispatching network-vif-plugged-cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 902.310115] env[68638]: WARNING nova.compute.manager [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Received unexpected event network-vif-plugged-cf8deeee-8158-4035-a42c-831e6b8d6f83 for instance with vm_state building and task_state spawning. 
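[editor's note] The ExtendVirtualDisk_Task / CopyVirtualDisk_Task entries above, together with the recurring "Waiting for the task … progress is N% … completed successfully" records, are the usual oslo.vmware invoke-and-poll pattern. The sketch below is not the Nova code itself, only a minimal illustration of that pattern; the vCenter host, credentials, datastore path and datacenter reference are placeholders, not values from this deployment.

from oslo_vmware import api

# Hedged sketch of the invoke-and-poll pattern seen in the log above.
# Host, credentials, datastore path and datacenter moref are placeholders;
# the session connects to vCenter when constructed.
session = api.VMwareAPISession(
    'vcenter.example.test',            # placeholder host
    'user@vsphere.local', 'secret',    # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)            # drives the periodic "progress is N%" polling

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore1] example/example.vmdk',  # placeholder datastore path
    datacenter=None,                           # a real Datacenter moref in practice
    newCapacityKb=1048576,                     # cf. "Extending root virtual disk to 1048576"
    eagerZero=False)

# wait_for_task() blocks while the library logs "Task: ... progress is N%"
# updates and returns once the task reports success (or raises on error).
session.wait_for_task(task)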
[ 902.310303] env[68638]: DEBUG nova.compute.manager [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Received event network-changed-cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 902.311437] env[68638]: DEBUG nova.compute.manager [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Refreshing instance network info cache due to event network-changed-cf8deeee-8158-4035-a42c-831e6b8d6f83. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 902.311690] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Acquiring lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.324587] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.193008} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.325216] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52084fb3-8ad5-d194-4d33-1176b8ca28ce, 'name': SearchDatastore_Task, 'duration_secs': 0.019306} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.327957] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.328619] env[68638]: DEBUG oslo_concurrency.lockutils [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.328619] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] fd6d5951-f2a1-422d-b137-4d19759f9060/fd6d5951-f2a1-422d-b137-4d19759f9060.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.329571] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd71880-2bdd-49e7-a521-80ba516cc524 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.333506] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-444bd875-ce73-43fc-8631-d891ab974309 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.360326] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 9ba0f737-7947-409c-9163-79d621a29285/9ba0f737-7947-409c-9163-79d621a29285.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.370013] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37f7296f-8c65-4c85-b028-f174467082ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.380865] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 902.380865] env[68638]: value = "task-2833808" [ 902.380865] env[68638]: _type = "Task" [ 902.380865] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.391958] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.395960] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 902.395960] env[68638]: value = "task-2833809" [ 902.395960] env[68638]: _type = "Task" [ 902.395960] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.407099] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833809, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.438979] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5228de8-cf5e-4ff9-98c1-a6bf06f35a7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.458283] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.458722] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.459083] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.459426] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.459746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 
tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.463507] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd27dcc-6d71-4033-9b5b-1eca8bc0eef9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.471977] env[68638]: INFO nova.compute.manager [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Terminating instance [ 902.519926] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.520470] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Instance network_info: |[{"id": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "address": "fa:16:3e:97:f7:1f", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8deeee-81", "ovs_interfaceid": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 902.522014] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Acquired lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.522267] env[68638]: DEBUG nova.network.neutron [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Refreshing network info cache for port cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 902.523877] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:f7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf8deeee-8158-4035-a42c-831e6b8d6f83', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.536569] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.537602] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bc6c8e-ddf9-4b17-8084-4fbdac7bf990 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.543555] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.544166] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f408b5e-2a49-48a1-9a00-acfd91bed497 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.568240] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8838de9-359c-4236-a8e8-fb4aefcf799c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.573992] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.573992] env[68638]: value = "task-2833810" [ 902.573992] env[68638]: _type = "Task" [ 902.573992] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.594276] env[68638]: DEBUG nova.compute.provider_tree [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.606716] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833810, 'name': CreateVM_Task} progress is 15%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.638075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.638075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.638075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.638075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.638075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.640973] env[68638]: INFO nova.compute.manager [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Terminating instance [ 902.645018] env[68638]: INFO nova.compute.manager [-] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Took 1.38 seconds to deallocate network for instance. 
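[editor's note] The "Acquiring lock … / … acquired … waited 0.000s / … released … held 0.000s" triplets in the entries above are emitted by oslo.concurrency's lock helpers around the guarded section. A minimal sketch of both spellings follows; the lock names are copied from the log, while the guarded bodies and the pairing of helper-to-call-site are illustrative only.

from oslo_concurrency import lockutils

# Decorator form: logs the "Acquiring lock ... by ..." / "acquired ... waited Ns"
# / "released ... held Ns" messages around each call (lock name taken from the
# log; the body is a placeholder).
@lockutils.synchronized('3c3fcbca-2477-4037-a978-4b8e9ed0a690-events')
def _clear_events():
    return {}

# Context-manager form of the same helper; the lock name mirrors the
# "compute_resources" lock seen in the log, the body is a placeholder.
def claim_resources():
    with lockutils.lock('compute_resources'):
        pass  # guarded work would run here

_clear_events()
claim_resources()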
[ 902.738291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.865027] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 902.867306] env[68638]: DEBUG nova.network.neutron [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Updated VIF entry in instance network info cache for port cf8deeee-8158-4035-a42c-831e6b8d6f83. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.869353] env[68638]: DEBUG nova.network.neutron [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Updating instance_info_cache with network_info: [{"id": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "address": "fa:16:3e:97:f7:1f", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8deeee-81", "ovs_interfaceid": "cf8deeee-8158-4035-a42c-831e6b8d6f83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.896040] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833808, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.903747] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 902.904102] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 902.904460] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 902.904689] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 902.904849] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 902.905032] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 902.905326] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 902.905547] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 902.905734] env[68638]: DEBUG nova.virt.hardware [None 
req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 902.905909] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 902.906147] env[68638]: DEBUG nova.virt.hardware [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 902.907164] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fc661a-ed97-49fc-9fe9-82c01d30976c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.915072] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833809, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.921871] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ab11a7-cfaa-4cb0-a9c3-5218b9b57520 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.978319] env[68638]: DEBUG nova.compute.manager [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.978656] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.980328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7e5d81-4613-4366-bd32-5fdbadf8dca0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.990987] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.991347] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a089f3cd-a127-43ac-888b-9af4e9126bd3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.000488] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 903.000488] env[68638]: value = "task-2833811" [ 903.000488] env[68638]: _type = "Task" [ 903.000488] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.011999] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833811, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.086052] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833810, 'name': CreateVM_Task, 'duration_secs': 0.472598} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.086253] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 903.086959] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.087147] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.087485] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 903.087759] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd69f01b-a594-4822-bc9e-990d325c0660 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.093884] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 903.093884] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521edbf3-9026-07a8-b6b6-92b8c4f12f67" [ 903.093884] env[68638]: _type = "Task" [ 903.093884] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.098223] env[68638]: DEBUG nova.scheduler.client.report [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.105012] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521edbf3-9026-07a8-b6b6-92b8c4f12f67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.146954] env[68638]: DEBUG nova.compute.manager [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 903.147277] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 903.148407] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ef02e6-cd08-4ee9-bda9-b12aa7dd07b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.152699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.159364] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 903.159644] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd2505e7-79b3-4a98-936c-8b76e5a2c912 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.167138] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 903.167138] env[68638]: value = "task-2833812" [ 903.167138] env[68638]: _type = "Task" [ 903.167138] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.177614] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833812, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.372248] env[68638]: DEBUG oslo_concurrency.lockutils [req-0c2087b3-ea26-4c93-a7af-8c09b831c963 req-723389c6-39df-4d14-8b0b-4bd2800d7f90 service nova] Releasing lock "refresh_cache-9975e756-b571-4e70-ba50-a6001d0b064c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.402323] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.782415} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.406498] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] fd6d5951-f2a1-422d-b137-4d19759f9060/fd6d5951-f2a1-422d-b137-4d19759f9060.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.406940] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.407401] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7d558e5-5725-45d9-8ad8-07cf6c3d02bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.416042] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833809, 'name': ReconfigVM_Task, 'duration_secs': 0.934988} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.417670] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 9ba0f737-7947-409c-9163-79d621a29285/9ba0f737-7947-409c-9163-79d621a29285.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.418654] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 903.418654] env[68638]: value = "task-2833813" [ 903.418654] env[68638]: _type = "Task" [ 903.418654] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.418970] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0f98b08-260d-43db-ace4-0916d4973da8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.433330] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833813, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.436036] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 903.436036] env[68638]: value = "task-2833814" [ 903.436036] env[68638]: _type = "Task" [ 903.436036] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.459406] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833814, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.513172] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833811, 'name': PowerOffVM_Task, 'duration_secs': 0.25266} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.514180] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.514180] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.514180] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85ad7c5f-da00-484c-8c16-c7f6bae7b411 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.587021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.587554] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.587894] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore1] 6200613c-b5de-4774-b0c6-fdb78b4c7267 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.588625] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d962bc4-d337-4f76-be6b-21129279cd91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.599225] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 903.599225] env[68638]: value = "task-2833816" [ 903.599225] env[68638]: _type = "Task" [ 903.599225] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.606771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.792s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.611027] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 903.611027] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521edbf3-9026-07a8-b6b6-92b8c4f12f67, 'name': SearchDatastore_Task, 'duration_secs': 0.016765} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.614279] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.740s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.616158] env[68638]: INFO nova.compute.claims [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.622662] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.622902] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.623163] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.623313] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.623501] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.627885] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59c59401-1a70-4d46-bb3f-fed199298a04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.630319] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.643449] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.643875] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.644505] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a729e4a8-901c-4612-9874-34591de608f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.650654] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 903.650654] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52da7b3a-dfc6-dc62-09f8-490dd00e3a62" [ 903.650654] env[68638]: _type = "Task" [ 903.650654] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.660037] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52da7b3a-dfc6-dc62-09f8-490dd00e3a62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.678617] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833812, 'name': PowerOffVM_Task, 'duration_secs': 0.337307} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.678885] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 903.679067] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 903.679328] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51bbf816-ad42-48ba-97b3-781ca5f5391f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.750895] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 903.751313] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 903.751503] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleting the datastore file [datastore1] 3c3fcbca-2477-4037-a978-4b8e9ed0a690 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.752931] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5e62eec-a00e-40bf-9a44-83a1db01fbf4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.760016] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for the task: (returnval){ [ 903.760016] env[68638]: value = "task-2833818" [ 903.760016] env[68638]: _type = "Task" [ 903.760016] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.766989] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Successfully updated port: 99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.773283] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.931374] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833813, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143656} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.932444] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.932444] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f336b6-5f5a-43db-97ed-e453560c94a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.957610] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] fd6d5951-f2a1-422d-b137-4d19759f9060/fd6d5951-f2a1-422d-b137-4d19759f9060.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.958413] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15575a29-56c4-4335-b9f7-8a34c859cb4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.975988] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833814, 'name': Rename_Task, 'duration_secs': 0.311445} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.976720] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.977044] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed283206-9c5f-417d-bd33-0779a89a3297 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.983929] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 903.983929] env[68638]: value = "task-2833820" [ 903.983929] env[68638]: _type = "Task" [ 903.983929] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.985383] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 903.985383] env[68638]: value = "task-2833819" [ 903.985383] env[68638]: _type = "Task" [ 903.985383] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.997146] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833819, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.000376] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.110619] env[68638]: DEBUG oslo_vmware.api [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.447888} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.110619] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.110851] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.110851] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.111034] env[68638]: INFO nova.compute.manager [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Took 1.13 seconds to destroy the instance on the hypervisor. [ 904.111276] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.111478] env[68638]: DEBUG nova.compute.manager [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.111611] env[68638]: DEBUG nova.network.neutron [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.126113] env[68638]: DEBUG nova.compute.utils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 904.127597] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 904.127738] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 904.166163] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52da7b3a-dfc6-dc62-09f8-490dd00e3a62, 'name': SearchDatastore_Task, 'duration_secs': 0.026277} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.166875] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5600cbf-f1ec-4df3-bf4a-488fa91ebdfb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.173189] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 904.173189] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52711a7d-44dd-95d7-4c48-ae4fd086603b" [ 904.173189] env[68638]: _type = "Task" [ 904.173189] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.182760] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52711a7d-44dd-95d7-4c48-ae4fd086603b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.270976] env[68638]: DEBUG oslo_vmware.api [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Task: {'id': task-2833818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442486} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.271352] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.271577] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.271751] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.271917] env[68638]: INFO nova.compute.manager [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Took 1.12 seconds to destroy the instance on the hypervisor. [ 904.272181] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.272378] env[68638]: DEBUG nova.compute.manager [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.272468] env[68638]: DEBUG nova.network.neutron [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 904.274867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.275034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquired lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.275330] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.323480] env[68638]: DEBUG nova.compute.manager [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Received event network-vif-plugged-99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 904.323710] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Acquiring lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.323945] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.324072] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.324317] env[68638]: DEBUG nova.compute.manager [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] No waiting events found dispatching network-vif-plugged-99d48199-ae21-4f20-8c41-f96a59bcf89b 
{{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.324536] env[68638]: WARNING nova.compute.manager [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Received unexpected event network-vif-plugged-99d48199-ae21-4f20-8c41-f96a59bcf89b for instance with vm_state building and task_state spawning. [ 904.324713] env[68638]: DEBUG nova.compute.manager [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Received event network-changed-99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 904.324950] env[68638]: DEBUG nova.compute.manager [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Refreshing instance network info cache due to event network-changed-99d48199-ae21-4f20-8c41-f96a59bcf89b. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 904.325045] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Acquiring lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.415691] env[68638]: DEBUG nova.policy [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 904.502916] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833819, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.506725] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833820, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.631435] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 904.688242] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52711a7d-44dd-95d7-4c48-ae4fd086603b, 'name': SearchDatastore_Task, 'duration_secs': 0.011137} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.688767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.689042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9975e756-b571-4e70-ba50-a6001d0b064c/9975e756-b571-4e70-ba50-a6001d0b064c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.689318] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6ca0027-fc91-49f8-80fb-6f6ff40aa15a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.699942] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 904.699942] env[68638]: value = "task-2833821" [ 904.699942] env[68638]: _type = "Task" [ 904.699942] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.708972] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.793474] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Successfully created port: a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 904.825081] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.921317] env[68638]: DEBUG nova.network.neutron [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.010399] env[68638]: DEBUG oslo_vmware.api [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833820, 'name': PowerOnVM_Task, 'duration_secs': 0.558369} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.020348] env[68638]: DEBUG nova.network.neutron [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updating instance_info_cache with network_info: [{"id": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "address": "fa:16:3e:e4:a3:b9", "network": {"id": "16e43814-e88b-4088-86c9-adf3cc9f4d54", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1799385003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f84fb9b3b0442d89b45cc44b0eda16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d48199-ae", "ovs_interfaceid": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.022372] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.022971] env[68638]: INFO nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Took 10.22 seconds to spawn the instance on the hypervisor. 
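The DeleteDatastoreFile_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter method that returns a Task managed object, then blocks on the session's task poller, which emits the "Waiting for the task", "progress is N%" and "completed successfully" DEBUG lines. A minimal sketch of that invoke-then-wait pattern follows; the vCenter host, credentials, datastore path and single-datacenter lookup are placeholder assumptions, not values taken from this log.

# Sketch of the invoke-then-wait pattern behind the
# "Invoking FileManager.DeleteDatastoreFile_Task" and
# "Task: {...} completed successfully" entries above.
# Host, credentials and the datastore path are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

# Look up a Datacenter managed object reference (single-DC assumption).
dc_ref = session.invoke_api(
    vim_util, 'get_objects', session.vim, 'Datacenter', 1).objects[0].obj

# Start an asynchronous vCenter task; the call returns a Task moref.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore1] 00000000-0000-0000-0000-000000000000',
    datacenter=dc_ref)

# wait_for_task() polls the task, logging progress like the _poll_task
# DEBUG entries in this log, and raises if the task ends in error.
session.wait_for_task(task)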
[ 905.023379] env[68638]: DEBUG nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.024084] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833819, 'name': ReconfigVM_Task, 'duration_secs': 0.850552} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.026018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bfb33b-8cbb-43ab-8b89-5a82939dcc70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.030346] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Reconfigured VM instance instance-00000048 to attach disk [datastore1] fd6d5951-f2a1-422d-b137-4d19759f9060/fd6d5951-f2a1-422d-b137-4d19759f9060.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.031383] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57317558-4739-4416-ba06-493f5afff9a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.055305] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 905.055305] env[68638]: value = "task-2833822" [ 905.055305] env[68638]: _type = "Task" [ 905.055305] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.079032] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833822, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.125511] env[68638]: DEBUG nova.network.neutron [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.220696] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833821, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.284441] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb01774-bafc-4c2c-bbd8-f226df4074aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.294220] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0cf5a2-6c37-4efe-9493-91abbd9d096f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.332355] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65433409-b2da-4720-89e0-a17f5b923fdf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.343029] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97fe209-e08e-4e75-aeb9-4bb6db2ef92f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.362736] env[68638]: DEBUG nova.compute.provider_tree [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.422778] env[68638]: INFO nova.compute.manager [-] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Took 1.31 seconds to deallocate network for instance. [ 905.526060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Releasing lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.526060] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Instance network_info: |[{"id": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "address": "fa:16:3e:e4:a3:b9", "network": {"id": "16e43814-e88b-4088-86c9-adf3cc9f4d54", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1799385003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f84fb9b3b0442d89b45cc44b0eda16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d48199-ae", "ovs_interfaceid": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 905.526489] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Acquired lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.526489] env[68638]: DEBUG nova.network.neutron [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Refreshing network info cache for port 99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 905.530038] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:a3:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd6fab536-1e48-4d07-992a-076f0e6d089c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99d48199-ae21-4f20-8c41-f96a59bcf89b', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.536981] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Creating folder: Project (c3f84fb9b3b0442d89b45cc44b0eda16). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.538217] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c7e8c80-e42d-4c02-a70d-daf036e1e7e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.555042] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Created folder: Project (c3f84fb9b3b0442d89b45cc44b0eda16) in parent group-v569734. [ 905.555301] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Creating folder: Instances. Parent ref: group-v569939. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.556087] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e343f5c8-aa75-41bb-adf2-08f0751016f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.570224] env[68638]: INFO nova.compute.manager [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Took 56.40 seconds to build instance. [ 905.576985] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833822, 'name': Rename_Task, 'duration_secs': 0.207948} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.577292] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.577562] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f4e642c-0214-47a5-ae78-770fd2be2c13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.583631] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Created folder: Instances in parent group-v569939. [ 905.583631] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.583631] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.583631] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f83a92c-27d9-4cde-b7f9-abdb9e88ec01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.599185] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 905.599185] env[68638]: value = "task-2833825" [ 905.599185] env[68638]: _type = "Task" [ 905.599185] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.606828] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.606828] env[68638]: value = "task-2833826" [ 905.606828] env[68638]: _type = "Task" [ 905.606828] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.611883] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833825, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.622036] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833826, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.628873] env[68638]: INFO nova.compute.manager [-] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Took 1.36 seconds to deallocate network for instance. 
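The paired "Acquiring lock …", "Lock … acquired … waited", and "Lock … "released" … held" entries (for example around "refresh_cache-1bc685aa-…" and "compute_resources") are emitted by oslo.concurrency's lock helpers rather than by Nova code directly. A small generic sketch of the two usage styles that produce those lines is given below; the lock names and the function body are illustrative placeholders, not taken from Nova's source.

# Generic oslo.concurrency usage that produces the acquire/wait/hold
# DEBUG lines seen throughout this log. Names are placeholders.
from oslo_concurrency import lockutils

# Context-manager style, comparable to the per-instance
# "refresh_cache-<uuid>" locks taken while rebuilding the network cache.
with lockutils.lock('refresh_cache-example-instance'):
    pass  # work done while the in-process lock is held

# Decorator style, comparable to the resource tracker's
# "compute_resources" lock around claims and usage updates.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # claim or update resources under the shared lock

update_usage()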
[ 905.646191] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 905.675956] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 905.676209] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.676370] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 905.676560] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.676698] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 905.676844] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 905.677119] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 905.677303] env[68638]: DEBUG nova.virt.hardware 
[None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 905.677482] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 905.677649] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 905.677863] env[68638]: DEBUG nova.virt.hardware [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 905.678767] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e519bd-7636-4d76-9173-53bdc2d762b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.687959] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7307c22-64e9-45b8-ab0c-5fe05bdc97e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.713889] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.852237} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.714201] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9975e756-b571-4e70-ba50-a6001d0b064c/9975e756-b571-4e70-ba50-a6001d0b064c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.714396] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.714695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe44b3e4-4ed1-4669-b886-08afb7feb9f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.724372] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 905.724372] env[68638]: value = "task-2833827" [ 905.724372] env[68638]: _type = "Task" [ 905.724372] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.733253] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833827, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.869140] env[68638]: DEBUG nova.scheduler.client.report [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.930436] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.072690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2c8ad961-47fe-4927-b611-7652aa37183d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.526s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.110763] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833825, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.121173] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833826, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.136403] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.237914] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127389} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.238371] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 906.239685] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88131940-235d-46ec-a91a-9c7526f44ee3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.265854] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 9975e756-b571-4e70-ba50-a6001d0b064c/9975e756-b571-4e70-ba50-a6001d0b064c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 906.266220] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3092f394-5ace-4698-8cb0-02c21461f4e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.283467] env[68638]: DEBUG nova.network.neutron [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updated VIF entry in instance network info cache for port 99d48199-ae21-4f20-8c41-f96a59bcf89b. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.283873] env[68638]: DEBUG nova.network.neutron [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updating instance_info_cache with network_info: [{"id": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "address": "fa:16:3e:e4:a3:b9", "network": {"id": "16e43814-e88b-4088-86c9-adf3cc9f4d54", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1799385003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f84fb9b3b0442d89b45cc44b0eda16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d48199-ae", "ovs_interfaceid": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.294036] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 906.294036] env[68638]: value = "task-2833828" [ 906.294036] env[68638]: _type = "Task" [ 906.294036] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.305814] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833828, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.377302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.378304] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.385678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.423s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.387977] env[68638]: INFO nova.compute.claims [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.479223] env[68638]: DEBUG nova.compute.manager [req-d2bc8b9e-7926-4b5c-9327-179b0ce799b0 req-85f004d9-0b8e-4c8e-822e-a122d810c7a7 service nova] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Received event network-vif-deleted-74220954-1e9b-4dd4-a7a3-6a799a426d21 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 906.479621] env[68638]: DEBUG nova.compute.manager [req-d2bc8b9e-7926-4b5c-9327-179b0ce799b0 req-85f004d9-0b8e-4c8e-822e-a122d810c7a7 service nova] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Received event network-vif-deleted-41008c1d-c94d-416f-8c08-9f52170f20c0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 906.558189] env[68638]: DEBUG nova.compute.manager [req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Received event network-vif-plugged-a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 906.558510] env[68638]: DEBUG oslo_concurrency.lockutils [req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] Acquiring lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.558619] env[68638]: DEBUG oslo_concurrency.lockutils [req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.558787] env[68638]: DEBUG oslo_concurrency.lockutils [req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.558956] env[68638]: DEBUG nova.compute.manager [req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] No waiting events found dispatching network-vif-plugged-a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 906.559165] env[68638]: WARNING nova.compute.manager 
[req-7b3c71d5-11ba-44ac-ae91-68d32de329e3 req-070a8dcd-625d-47cd-b4e4-df99db4f4a04 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Received unexpected event network-vif-plugged-a087b668-2b77-40e4-8a37-af9d56aa8b57 for instance with vm_state building and task_state spawning. [ 906.613220] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833825, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.627283] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833826, 'name': CreateVM_Task, 'duration_secs': 0.822138} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.630562] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.631721] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Successfully updated port: a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.634024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.634024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.634024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.634203] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d803b8e-6db9-4e14-a067-a08e3cbfcc9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.640973] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 906.640973] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52620fc6-30fb-9815-70cb-7ab56d42a8ef" [ 906.640973] env[68638]: _type = "Task" [ 906.640973] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.651849] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52620fc6-30fb-9815-70cb-7ab56d42a8ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.787966] env[68638]: DEBUG oslo_concurrency.lockutils [req-df8448aa-6229-41b0-b33a-b74bd1b9e2f9 req-185bcae8-b033-4678-ac3b-99ae4bd82b43 service nova] Releasing lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.805787] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833828, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.896563] env[68638]: DEBUG nova.compute.utils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 906.900253] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 906.900433] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 906.948170] env[68638]: DEBUG nova.policy [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.111795] env[68638]: DEBUG oslo_vmware.api [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833825, 'name': PowerOnVM_Task, 'duration_secs': 1.227991} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.112084] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.112301] env[68638]: INFO nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Took 9.70 seconds to spawn the instance on the hypervisor. [ 907.112484] env[68638]: DEBUG nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.113304] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634c29b0-e8ee-4ad4-8fc0-3426ae7b2463 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.132282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "9ba0f737-7947-409c-9163-79d621a29285" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.132282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.132282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "9ba0f737-7947-409c-9163-79d621a29285-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.132282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.132282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.137525] env[68638]: INFO nova.compute.manager [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Terminating instance [ 907.143116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.143116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.143116] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.155702] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52620fc6-30fb-9815-70cb-7ab56d42a8ef, 'name': SearchDatastore_Task, 'duration_secs': 0.025288} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.156593] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.156807] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.157049] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.157197] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.157376] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.157866] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-410c6280-ee44-427f-9cc2-292dbf6e83bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.170642] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.171214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.172573] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd50e8a2-6f6c-466f-93da-e0706f129dae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.180153] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 907.180153] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522861d0-0253-c832-acbf-64647e69b5d2" [ 907.180153] env[68638]: _type = "Task" [ 907.180153] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.190717] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522861d0-0253-c832-acbf-64647e69b5d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.300494] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Successfully created port: c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.309173] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833828, 'name': ReconfigVM_Task, 'duration_secs': 0.560991} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.309472] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 9975e756-b571-4e70-ba50-a6001d0b064c/9975e756-b571-4e70-ba50-a6001d0b064c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 907.310272] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ceadba55-bbb2-46bf-bdc8-4904b17d1fdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.318481] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 907.318481] env[68638]: value = "task-2833829" [ 907.318481] env[68638]: _type = "Task" [ 907.318481] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.333442] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833829, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.401480] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.633106] env[68638]: INFO nova.compute.manager [None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Took 52.66 seconds to build instance. [ 907.649906] env[68638]: DEBUG nova.compute.manager [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.650146] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.652373] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e6e84e-427d-4aa0-ad53-3652db9a28e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.666853] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.668513] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d725c22-4511-45cf-a306-a29ed602d69b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.685965] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 907.685965] env[68638]: value = "task-2833830" [ 907.685965] env[68638]: _type = "Task" [ 907.685965] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.697597] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522861d0-0253-c832-acbf-64647e69b5d2, 'name': SearchDatastore_Task, 'duration_secs': 0.012903} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.702370] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.706164] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d96aa3e6-1707-4f41-a726-2f01d1b4ba22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.714054] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.718759] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 907.718759] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bfeaaa-35f1-c558-cf7f-fc9d1f28bd64" [ 907.718759] env[68638]: _type = "Task" [ 907.718759] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.728793] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bfeaaa-35f1-c558-cf7f-fc9d1f28bd64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.831432] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833829, 'name': Rename_Task, 'duration_secs': 0.238475} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.831726] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.831973] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bd2d55f-208d-4a25-a085-806b0d5cad3b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.840869] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 907.840869] env[68638]: value = "task-2833831" [ 907.840869] env[68638]: _type = "Task" [ 907.840869] env[68638]: } to complete. 
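Editor's note: the SearchDatastore_Task, Rename_Task and PowerOnVM_Task entries around here all follow the same oslo.vmware pattern: the driver submits a vCenter task, then polls it at a fixed interval, logging "progress is N%" until the task reaches a terminal state (the real driver does this through oslo_vmware.api.VMwareAPISession.wait_for_task). A minimal sketch of that loop, using a hypothetical poll_task() callable as a stand-in for the vCenter round trip:

    import time

    # Sketch only. poll_task(task_id) is a hypothetical stand-in that returns
    # (state, progress) for a vCenter task; the production code path is
    # oslo_vmware.api.VMwareAPISession.wait_for_task().
    def wait_for_task(poll_task, task_id, interval=0.5):
        """Poll a task until it finishes, logging progress like the entries above."""
        while True:
            state, progress = poll_task(task_id)   # e.g. ('running', 66)
            print(f"Task: {task_id} progress is {progress}%.")
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError(f"Task {task_id} failed")
            time.sleep(interval)

The "Waiting for the task: (returnval){ value = "task-…" _type = "Task" } to complete." lines above are the entry into exactly this kind of loop, and the later "completed successfully" lines with a duration_secs field are its exit.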
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.857325] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833831, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.930158] env[68638]: DEBUG nova.network.neutron [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updating instance_info_cache with network_info: [{"id": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "address": "fa:16:3e:04:7f:c9", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa087b668-2b", "ovs_interfaceid": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.956195] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a845038e-09f8-4510-b439-2d6512c6c5ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.965812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84a1030-63bb-4ac6-bffd-f30905f596a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.000458] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0e88fe-ec2d-44ca-9661-21d257052c20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.011821] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa0da1e-8102-47f3-8e06-8afda7e2c4b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.027708] env[68638]: DEBUG nova.compute.provider_tree [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.138131] env[68638]: DEBUG oslo_concurrency.lockutils 
[None req-217ee992-9198-4e7a-bbdc-085cbfafc20f tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.681s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.196725] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833830, 'name': PowerOffVM_Task, 'duration_secs': 0.230983} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.197014] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.197203] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.197492] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d46b5253-d520-491d-a21a-afa000439ef1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.230176] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bfeaaa-35f1-c558-cf7f-fc9d1f28bd64, 'name': SearchDatastore_Task, 'duration_secs': 0.013012} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.230783] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.231071] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1bc685aa-4e88-402f-b581-d179706b12a5/1bc685aa-4e88-402f-b581-d179706b12a5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.231355] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6354c0c8-4bf4-4e3d-994c-b0625141ec0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.238277] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 908.238277] env[68638]: value = "task-2833833" [ 908.238277] env[68638]: _type = "Task" [ 908.238277] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.248058] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833833, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.263562] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.263856] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.264477] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore1] 9ba0f737-7947-409c-9163-79d621a29285 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.264477] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6f6cccc-1c5d-46e3-ab49-b2dcaad25222 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.274030] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 908.274030] env[68638]: value = "task-2833834" [ 908.274030] env[68638]: _type = "Task" [ 908.274030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.283776] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833834, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.351977] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833831, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.418226] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.433382] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.433750] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Instance network_info: |[{"id": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "address": "fa:16:3e:04:7f:c9", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa087b668-2b", "ovs_interfaceid": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 908.434322] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:7f:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a087b668-2b77-40e4-8a37-af9d56aa8b57', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 908.441934] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
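Editor's note: the network_info cache entries above carry each VIF as a list of dicts (port id, MAC address, fixed IPs per subnet, OVS/NSX details), and the "Instance VIF info" line is derived from those fields. A small sketch, assuming the same structure as logged and treating it as plain dicts rather than Nova's NetworkInfo model, showing how the interesting fields can be pulled out:

    # Sketch only: structure copied from the network_info logged above.
    network_info = [{
        "id": "a087b668-2b77-40e4-8a37-af9d56aa8b57",
        "address": "fa:16:3e:04:7f:c9",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.4"}]}]},
        "details": {"nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0"},
        "devname": "tapa087b668-2b",
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        # port id, MAC, fixed IPs, backing logical switch (used as network_ref above)
        print(vif["id"], vif["address"], fixed_ips,
              vif["details"]["nsx-logical-switch-id"])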
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.444405] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 908.444852] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7632f2b2-fbed-4fca-be47-1b013c723a78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.462272] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.462545] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.462705] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.462889] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.463045] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.463235] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.463400] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 908.463565] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.463750] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.463986] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.464235] env[68638]: DEBUG nova.virt.hardware [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.465117] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ca91f8-b6a7-4553-8812-1d08e94626bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.475477] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04041bd-fb5a-463b-b899-7bb535e1a830 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.480282] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 908.480282] env[68638]: value = "task-2833835" [ 908.480282] env[68638]: _type = "Task" [ 908.480282] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.497988] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833835, 'name': CreateVM_Task} progress is 6%. 
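Editor's note: the nova.virt.hardware entries above walk through CPU topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits, so the maxima default to 65536 and the only candidate is sockets=1, cores=1, threads=1. A toy enumeration (not Nova's actual _get_possible_cpu_topologies, just a simplified illustration that reproduces the logged result):

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Toy enumeration of (sockets, cores, threads) whose product is vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1, 65536, 65536, 65536)))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the log above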
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.531235] env[68638]: DEBUG nova.scheduler.client.report [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.592540] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "fd6d5951-f2a1-422d-b137-4d19759f9060" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.592961] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.593125] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.593291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.593460] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.595766] env[68638]: INFO nova.compute.manager [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Terminating 
instance [ 908.600632] env[68638]: DEBUG nova.compute.manager [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Received event network-changed-a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 908.600820] env[68638]: DEBUG nova.compute.manager [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Refreshing instance network info cache due to event network-changed-a087b668-2b77-40e4-8a37-af9d56aa8b57. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 908.601032] env[68638]: DEBUG oslo_concurrency.lockutils [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] Acquiring lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.601180] env[68638]: DEBUG oslo_concurrency.lockutils [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] Acquired lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.601353] env[68638]: DEBUG nova.network.neutron [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Refreshing network info cache for port a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.749640] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833833, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5032} completed successfully. 
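Editor's note: the scheduler report entry above lists the placement inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with totals, reserved amounts and allocation ratios. The usable capacity placement schedules against is (total - reserved) * allocation_ratio; a quick check against the logged numbers, as a sketch:

    # Inventory values copied from the log entry above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400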
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.750174] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 1bc685aa-4e88-402f-b581-d179706b12a5/1bc685aa-4e88-402f-b581-d179706b12a5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.750590] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.750894] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81a31658-f9b3-4938-87a8-6a2f7133e3f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.759441] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 908.759441] env[68638]: value = "task-2833836" [ 908.759441] env[68638]: _type = "Task" [ 908.759441] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.768532] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.784789] env[68638]: DEBUG oslo_vmware.api [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192881} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.785276] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.785582] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.785836] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.786039] env[68638]: INFO nova.compute.manager [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Took 1.14 seconds to destroy the instance on the hypervisor. [ 908.786362] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 908.786777] env[68638]: DEBUG nova.compute.manager [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.786899] env[68638]: DEBUG nova.network.neutron [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.857584] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833831, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.950512] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Successfully updated port: c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.991539] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833835, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.036689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.651s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.037203] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 909.039818] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.805s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.040029] env[68638]: DEBUG nova.objects.instance [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lazy-loading 'resources' on Instance uuid 17f6cd0a-bbc1-47c3-9c36-2166ba448de2 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.103521] env[68638]: DEBUG nova.compute.manager [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Start destroying the instance on the hypervisor. 
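Editor's note: the recurring "compute_resources" lock entries above ("acquired … waited 31.805s", "released … held 2.651s") come from oslo.concurrency, which logs wait and hold times around each critical section in the resource tracker. A minimal sketch of the same pattern, assuming only the documented lockutils.lock() context manager rather than Nova's actual synchronized decorator:

    from oslo_concurrency import lockutils

    # Sketch: serialize claim/usage updates on one shared lock name, as the
    # resource tracker does; the "waited"/"held" timing lines are emitted by
    # lockutils itself around this block.
    def update_usage(tracker_state, vcpu_delta):
        with lockutils.lock("compute_resources"):
            # critical section: mutate tracked resources while holding the lock
            tracker_state["vcpus_used"] += vcpu_delta

The long waits logged here (about 30 seconds in the instance_claim entry earlier in this section) simply reflect other claims or usage updates holding the same lock, not an error.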
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.103770] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.106968] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8eef9b-655f-4d6e-80f8-97f7dc18dad2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.118242] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.118743] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9ed18ec-3660-4333-bf4d-7f535e14fa04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.126010] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 909.126010] env[68638]: value = "task-2833837" [ 909.126010] env[68638]: _type = "Task" [ 909.126010] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.135768] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833837, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.181992] env[68638]: DEBUG nova.compute.manager [req-0341d82d-3cf4-4eb5-b891-0f33e69503b0 req-7972976c-30a4-4d40-acc6-20bc50535dde service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Received event network-vif-deleted-43a3db27-15d1-4114-b5f5-63529cba0444 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 909.182250] env[68638]: INFO nova.compute.manager [req-0341d82d-3cf4-4eb5-b891-0f33e69503b0 req-7972976c-30a4-4d40-acc6-20bc50535dde service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Neutron deleted interface 43a3db27-15d1-4114-b5f5-63529cba0444; detaching it from the instance and deleting it from the info cache [ 909.182428] env[68638]: DEBUG nova.network.neutron [req-0341d82d-3cf4-4eb5-b891-0f33e69503b0 req-7972976c-30a4-4d40-acc6-20bc50535dde service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.270546] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079897} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.270828] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.272112] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73cb550-68ea-4488-baab-5e874940a55e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.298531] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 1bc685aa-4e88-402f-b581-d179706b12a5/1bc685aa-4e88-402f-b581-d179706b12a5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.302264] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac6004d7-43af-40c5-ae8d-30863b071100 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.331323] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 909.331323] env[68638]: value = "task-2833838" [ 909.331323] env[68638]: _type = "Task" [ 909.331323] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.341106] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833838, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.353632] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833831, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.393358] env[68638]: DEBUG nova.network.neutron [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updated VIF entry in instance network info cache for port a087b668-2b77-40e4-8a37-af9d56aa8b57. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.393792] env[68638]: DEBUG nova.network.neutron [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updating instance_info_cache with network_info: [{"id": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "address": "fa:16:3e:04:7f:c9", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa087b668-2b", "ovs_interfaceid": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.452915] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.453134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.453344] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b 
tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.491869] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833835, 'name': CreateVM_Task, 'duration_secs': 0.627745} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.494075] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 909.494075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.494075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.494075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 909.494075] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e4939e-3160-4a5b-89e2-5845c3eaf0f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.499417] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 909.499417] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5261c000-4fec-7290-4234-16d84f7700ac" [ 909.499417] env[68638]: _type = "Task" [ 909.499417] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.508529] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5261c000-4fec-7290-4234-16d84f7700ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.543661] env[68638]: DEBUG nova.compute.utils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.545138] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.545314] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.599234] env[68638]: DEBUG nova.policy [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'adf3da1f81694585b727a7b0528dfeb3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46bace7ece424608bf9f88293ba6364c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.600785] env[68638]: DEBUG nova.network.neutron [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.640972] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833837, 'name': PowerOffVM_Task, 'duration_secs': 0.242118} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.643875] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.644087] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.644566] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-920e6a27-622e-4d18-a1ba-6236561d0eea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.686589] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26587211-dc94-4f13-bb01-4e04b41d83ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.697174] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189faf4e-050f-4f56-aadd-1f7c842263b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.742657] env[68638]: DEBUG nova.compute.manager [req-0341d82d-3cf4-4eb5-b891-0f33e69503b0 req-7972976c-30a4-4d40-acc6-20bc50535dde service nova] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Detach interface failed, port_id=43a3db27-15d1-4114-b5f5-63529cba0444, reason: Instance 9ba0f737-7947-409c-9163-79d621a29285 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 909.747316] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.747531] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.747739] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Deleting the datastore file [datastore1] fd6d5951-f2a1-422d-b137-4d19759f9060 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.748250] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06ba315b-0635-435e-a59a-dd253cb8ea97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.757148] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for the task: (returnval){ [ 909.757148] env[68638]: value = "task-2833840" [ 909.757148] env[68638]: _type = "Task" [ 909.757148] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.767856] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833840, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.843509] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833838, 'name': ReconfigVM_Task, 'duration_secs': 0.486563} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.843877] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 1bc685aa-4e88-402f-b581-d179706b12a5/1bc685aa-4e88-402f-b581-d179706b12a5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.844684] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3228407e-cd46-4836-9829-6120dbbcf6c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.861318] env[68638]: DEBUG oslo_vmware.api [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833831, 'name': PowerOnVM_Task, 'duration_secs': 1.787511} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.862783] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.864074] env[68638]: INFO nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Took 9.77 seconds to spawn the instance on the hypervisor. [ 909.864074] env[68638]: DEBUG nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.864074] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 909.864074] env[68638]: value = "task-2833841" [ 909.864074] env[68638]: _type = "Task" [ 909.864074] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.864535] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536c39a6-a595-4d52-9758-0cf1e8c59596 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.886372] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833841, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.897407] env[68638]: DEBUG oslo_concurrency.lockutils [req-9b727248-9021-4739-8ed1-6d98b53f9250 req-f7a371f2-e134-490a-b365-498b9c853f65 service nova] Releasing lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.935045] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Successfully created port: ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.985697] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.017070] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5261c000-4fec-7290-4234-16d84f7700ac, 'name': SearchDatastore_Task, 'duration_secs': 0.011102} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.017854] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.017854] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.017854] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.017854] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.018091] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 
tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.018472] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c9301d8-f965-4423-badf-786f70b47440 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.030461] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.030723] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.033476] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5289554-4908-4482-b3d2-7eb7f101487b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.039408] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 910.039408] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c2d9-2687-a436-377f-b7ab629907cb" [ 910.039408] env[68638]: _type = "Task" [ 910.039408] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.048045] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c2d9-2687-a436-377f-b7ab629907cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.050806] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 910.081520] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd3bcaf-7f85-4353-b480-cf3b4f35ac76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.090422] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58940376-08a6-44db-b579-e8b532500b73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.122386] env[68638]: INFO nova.compute.manager [-] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Took 1.34 seconds to deallocate network for instance. 
[ 910.125069] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa665a8-99ed-454d-8ced-64135aa5122a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.136230] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6abaed-bea2-4d8f-b19f-1392790d6be1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.142510] env[68638]: DEBUG nova.network.neutron [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Updating instance_info_cache with network_info: [{"id": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "address": "fa:16:3e:24:d1:a2", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6b422b3-96", "ovs_interfaceid": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.156772] env[68638]: DEBUG nova.compute.provider_tree [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.267788] env[68638]: DEBUG oslo_vmware.api [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Task: {'id': task-2833840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139125} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.268676] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.268676] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.268676] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.268676] env[68638]: INFO nova.compute.manager [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Took 1.16 seconds to destroy the instance on the hypervisor. [ 910.268893] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.270052] env[68638]: DEBUG nova.compute.manager [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.270052] env[68638]: DEBUG nova.network.neutron [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.381923] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833841, 'name': Rename_Task, 'duration_secs': 0.486052} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.382529] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 910.383051] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f9c2974-c446-4653-ad80-35e3a70b4ddb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.393191] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 910.393191] env[68638]: value = "task-2833842" [ 910.393191] env[68638]: _type = "Task" [ 910.393191] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.397288] env[68638]: INFO nova.compute.manager [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Took 46.56 seconds to build instance. [ 910.418163] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833842, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.553393] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d6c2d9-2687-a436-377f-b7ab629907cb, 'name': SearchDatastore_Task, 'duration_secs': 0.009881} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.557373] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8af9acb-1b04-4dee-b1de-b5ff9ca0b5e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.563762] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 910.563762] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e7173c-49b3-d4c3-ed2a-117986e1469c" [ 910.563762] env[68638]: _type = "Task" [ 910.563762] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.573543] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e7173c-49b3-d4c3-ed2a-117986e1469c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.632331] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.644966] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.645443] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Instance network_info: |[{"id": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "address": "fa:16:3e:24:d1:a2", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6b422b3-96", "ovs_interfaceid": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 910.645894] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:d1:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6b422b3-9642-4a7f-a4b7-848cbba4f147', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.654363] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.656292] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.657894] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Received event network-vif-plugged-c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 910.658110] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Acquiring lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.658342] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.658526] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.658697] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] No waiting events found dispatching network-vif-plugged-c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 910.658863] env[68638]: WARNING nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Received unexpected event network-vif-plugged-c6b422b3-9642-4a7f-a4b7-848cbba4f147 for instance with vm_state building and task_state spawning. [ 910.659032] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Received event network-changed-c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 910.659193] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Refreshing instance network info cache due to event network-changed-c6b422b3-9642-4a7f-a4b7-848cbba4f147. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 910.659378] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Acquiring lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.659544] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Acquired lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.659692] env[68638]: DEBUG nova.network.neutron [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Refreshing network info cache for port c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.660778] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-296ac744-4cc1-44b2-ace5-158dd37683a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.676700] env[68638]: DEBUG nova.scheduler.client.report [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.688997] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.688997] env[68638]: value = "task-2833843" [ 910.688997] env[68638]: _type = "Task" [ 910.688997] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.699141] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833843, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.907613] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbe88e1-4837-464d-a8ff-18b86517c0f8 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.143s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.907955] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833842, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.030327] env[68638]: DEBUG nova.network.neutron [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.061460] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 911.076018] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e7173c-49b3-d4c3-ed2a-117986e1469c, 'name': SearchDatastore_Task, 'duration_secs': 0.012241} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.076631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.076907] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 53e92f51-9010-4fb2-89e1-9d16a252ef6e/53e92f51-9010-4fb2-89e1-9d16a252ef6e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.077203] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5f1124c-9e9d-44e7-a5b1-5bdfac57d00e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.088078] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 911.088335] 
env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 911.088493] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 911.088707] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 911.088857] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 911.089010] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 911.089238] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 911.089470] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 911.089868] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 911.090448] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 911.090703] env[68638]: DEBUG nova.virt.hardware [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 911.092333] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642d2995-818d-437f-b539-ddcc11f8a92e {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.097165] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 911.097165] env[68638]: value = "task-2833844" [ 911.097165] env[68638]: _type = "Task" [ 911.097165] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.104143] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3042ef-6ecd-4484-bd8b-c68b738c6408 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.111513] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833844, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.188739] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.192044] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.967s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.192715] env[68638]: DEBUG nova.objects.instance [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lazy-loading 'resources' on Instance uuid 63669b15-2ec8-4a0d-b772-6ef7407e8ebf {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.206491] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833843, 'name': CreateVM_Task, 'duration_secs': 0.406514} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.211020] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 911.211020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.211020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.211020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 911.211020] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bffbfaa-ab8f-4238-a64b-958b6d0a4f65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.218629] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 911.218629] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f2e21a-f482-fa03-1d1c-6a22c8ba290f" [ 911.218629] env[68638]: _type = "Task" [ 911.218629] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.219604] env[68638]: INFO nova.scheduler.client.report [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleted allocations for instance 17f6cd0a-bbc1-47c3-9c36-2166ba448de2 [ 911.234793] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f2e21a-f482-fa03-1d1c-6a22c8ba290f, 'name': SearchDatastore_Task, 'duration_secs': 0.013097} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.235112] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.235367] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.235633] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.237017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.237017] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.237017] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-809dec28-5894-498a-a72e-295d457df641 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.246229] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.246412] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.247450] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd11d7df-464b-4f77-91c5-20383ee8f9ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.256399] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 911.256399] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c0c34e-4604-79cf-bc32-09348d64d64e" [ 911.256399] env[68638]: _type = "Task" [ 911.256399] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.265430] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c0c34e-4604-79cf-bc32-09348d64d64e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.412647] env[68638]: DEBUG oslo_vmware.api [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833842, 'name': PowerOnVM_Task, 'duration_secs': 0.676674} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.413278] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.413599] env[68638]: INFO nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Took 8.55 seconds to spawn the instance on the hypervisor. 
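The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries in this stretch of the log all follow the same shape: a vCenter task is submitted, then the driver polls it ("Waiting for the task ... to complete", "progress is N%") until it reports completion. The snippet below is only a simplified, hypothetical sketch of that poll-until-done loop, not the actual oslo.vmware wait_for_task/_poll_task code; the session object and its get_task_info helper are assumed stand-ins for illustration.

import time

POLL_INTERVAL = 0.5  # seconds; in the real service the interval is configurable

def wait_for_task(session, task_ref, poll_interval=POLL_INTERVAL):
    """Poll a vCenter task reference until it succeeds or errors (illustrative only)."""
    while True:
        info = session.get_task_info(task_ref)   # hypothetical helper, not a real API
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError("task %s failed: %s" % (task_ref, info.error))
        # still queued or running: the DEBUG "progress is N%" lines above
        # correspond to this branch, then the loop sleeps and retries.
        time.sleep(poll_interval)

Under that assumption, the "completed successfully" DEBUG lines mark the iteration where the loop observes the success state and returns the task result to the caller.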
[ 911.413876] env[68638]: DEBUG nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 911.415143] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b169e4-fb5b-4196-95ab-adebd88dc850 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.431179] env[68638]: DEBUG nova.compute.manager [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Received event network-vif-plugged-ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 911.431429] env[68638]: DEBUG oslo_concurrency.lockutils [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] Acquiring lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.431718] env[68638]: DEBUG oslo_concurrency.lockutils [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.436247] env[68638]: DEBUG oslo_concurrency.lockutils [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.436803] env[68638]: DEBUG nova.compute.manager [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] No waiting events found dispatching network-vif-plugged-ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.436803] env[68638]: WARNING nova.compute.manager [req-2cd9e703-a858-4178-ac74-070606f279f4 req-ad724ab5-d808-453b-951c-3f397b2d4025 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Received unexpected event network-vif-plugged-ab92a49b-2fbf-4108-96cb-3a64ba792c4b for instance with vm_state building and task_state spawning. [ 911.496738] env[68638]: DEBUG nova.network.neutron [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Updated VIF entry in instance network info cache for port c6b422b3-9642-4a7f-a4b7-848cbba4f147. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.496738] env[68638]: DEBUG nova.network.neutron [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Updating instance_info_cache with network_info: [{"id": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "address": "fa:16:3e:24:d1:a2", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6b422b3-96", "ovs_interfaceid": "c6b422b3-9642-4a7f-a4b7-848cbba4f147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.533061] env[68638]: INFO nova.compute.manager [-] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Took 1.26 seconds to deallocate network for instance. [ 911.547454] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Successfully updated port: ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.609032] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506156} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.609032] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 53e92f51-9010-4fb2-89e1-9d16a252ef6e/53e92f51-9010-4fb2-89e1-9d16a252ef6e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 911.609249] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 911.609735] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ff80441-d81b-4526-8033-ea91521d62cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.617644] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 911.617644] env[68638]: value = "task-2833845" [ 911.617644] env[68638]: _type = "Task" [ 911.617644] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.626137] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833845, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.735914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d2b61b3-8736-4080-8436-95ec3d2302ff tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "17f6cd0a-bbc1-47c3-9c36-2166ba448de2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.936s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.769907] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c0c34e-4604-79cf-bc32-09348d64d64e, 'name': SearchDatastore_Task, 'duration_secs': 0.011065} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.773650] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f6c886d-41fa-4476-a665-14ec7c8d2ca7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.788992] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 911.788992] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522499f7-cb6d-a983-bc0c-0f32bc2360f3" [ 911.788992] env[68638]: _type = "Task" [ 911.788992] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.797699] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522499f7-cb6d-a983-bc0c-0f32bc2360f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.947619] env[68638]: INFO nova.compute.manager [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Took 41.75 seconds to build instance. [ 912.000198] env[68638]: DEBUG oslo_concurrency.lockutils [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] Releasing lock "refresh_cache-d2d30008-5058-4be3-b803-00d8ca4450d5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.000322] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Received event network-vif-deleted-5c77a676-4e49-4865-adc5-f84b63c42854 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 912.000413] env[68638]: INFO nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Neutron deleted interface 5c77a676-4e49-4865-adc5-f84b63c42854; detaching it from the instance and deleting it from the info cache [ 912.000573] env[68638]: DEBUG nova.network.neutron [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.041490] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.049799] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 
tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.050013] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.050089] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.081380] env[68638]: DEBUG nova.compute.manager [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.083451] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f1cc2f-f2b8-49c2-96ca-742210826807 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.120010] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ef3714-69a2-4768-b379-37ef70971cdb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.135862] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690b50a9-55e8-4877-bf3d-aba367885f4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.139098] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068035} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.139922] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 912.141290] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2754d585-03ff-4753-ae8c-4e2f9be0ed5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.169717] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f0db49-7062-46ca-aacd-909c87381816 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.192861] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 53e92f51-9010-4fb2-89e1-9d16a252ef6e/53e92f51-9010-4fb2-89e1-9d16a252ef6e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.194123] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88b3c5f1-092c-451f-9451-b65983b93797 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.212451] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745e7e94-a321-4a0c-9d41-c687fd7bc5c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.217695] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 912.217695] env[68638]: value = "task-2833847" [ 912.217695] env[68638]: _type = "Task" [ 912.217695] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.231226] env[68638]: DEBUG nova.compute.provider_tree [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.240044] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833847, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.305254] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522499f7-cb6d-a983-bc0c-0f32bc2360f3, 'name': SearchDatastore_Task, 'duration_secs': 0.011308} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.305725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.306185] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d2d30008-5058-4be3-b803-00d8ca4450d5/d2d30008-5058-4be3-b803-00d8ca4450d5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.306603] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e13cc2e-5518-46db-9ba1-d07c7a0593b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.317896] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 912.317896] env[68638]: value = "task-2833848" [ 912.317896] env[68638]: _type = "Task" [ 912.317896] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.325157] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833848, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.450433] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15637b4b-9e94-4863-96c4-0b1ea02d72af tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.298s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.505724] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f5f258f-bae8-4246-99e1-867bc8b06af5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.517468] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1f256d-f7b4-4c82-8b97-7a18932b6ee5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.566114] env[68638]: DEBUG nova.compute.manager [req-12a18397-c440-4642-b61a-ed652cffc4cd req-b4690c62-9a29-46d8-8662-72c723e1e924 service nova] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Detach interface failed, port_id=5c77a676-4e49-4865-adc5-f84b63c42854, reason: Instance fd6d5951-f2a1-422d-b137-4d19759f9060 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 912.597719] env[68638]: INFO nova.compute.manager [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] instance snapshotting [ 912.601299] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8984e73c-deca-4073-94fb-94c71adf1e8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.624635] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f121be37-b5da-4659-8f71-455da2209aa8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.628280] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.730584] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833847, 'name': ReconfigVM_Task, 'duration_secs': 0.31566} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.731025] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 53e92f51-9010-4fb2-89e1-9d16a252ef6e/53e92f51-9010-4fb2-89e1-9d16a252ef6e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.731889] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e30b614f-567a-4ef6-998a-e517d197e02c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.734977] env[68638]: DEBUG nova.scheduler.client.report [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.748346] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 912.748346] env[68638]: value = "task-2833849" [ 912.748346] env[68638]: _type = "Task" [ 912.748346] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.763304] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833849, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.829847] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833848, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.864010] env[68638]: DEBUG nova.network.neutron [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [{"id": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "address": "fa:16:3e:62:b5:b8", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab92a49b-2f", "ovs_interfaceid": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.141167] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 913.141522] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fb70dbfc-30e4-4d24-a97b-41a0491d24f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.150751] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 913.150751] env[68638]: value = "task-2833850" [ 913.150751] env[68638]: _type = "Task" [ 913.150751] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.160037] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833850, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.242675] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.245266] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.099s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.247076] env[68638]: INFO nova.compute.claims [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.261396] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833849, 'name': Rename_Task, 'duration_secs': 0.223242} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.261859] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.262277] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b8dbe30-7caa-4244-93c4-4544f491d8a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.271812] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 913.271812] env[68638]: value = "task-2833851" [ 913.271812] env[68638]: _type = "Task" [ 913.271812] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.280173] env[68638]: INFO nova.scheduler.client.report [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleted allocations for instance 63669b15-2ec8-4a0d-b772-6ef7407e8ebf [ 913.291995] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833851, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.333802] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833848, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548088} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.334279] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d2d30008-5058-4be3-b803-00d8ca4450d5/d2d30008-5058-4be3-b803-00d8ca4450d5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.334630] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.335018] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-529e2536-0444-434f-ab32-7b056d06c6f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.344332] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 913.344332] env[68638]: value = "task-2833852" [ 913.344332] env[68638]: _type = "Task" [ 913.344332] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.358624] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833852, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.368985] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.369347] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Instance network_info: |[{"id": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "address": "fa:16:3e:62:b5:b8", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab92a49b-2f", "ovs_interfaceid": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.369801] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:b5:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b36c5ae6-c344-4bd1-8239-29128e2bbfbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab92a49b-2fbf-4108-96cb-3a64ba792c4b', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.378788] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.379560] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.379896] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29098255-54e3-4907-8a6d-f25247ce1e42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.406977] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.406977] env[68638]: value = "task-2833853" [ 913.406977] env[68638]: _type = "Task" [ 913.406977] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.419120] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833853, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.488080] env[68638]: DEBUG nova.compute.manager [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Received event network-changed-99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 913.488080] env[68638]: DEBUG nova.compute.manager [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Refreshing instance network info cache due to event network-changed-99d48199-ae21-4f20-8c41-f96a59bcf89b. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 913.489047] env[68638]: DEBUG oslo_concurrency.lockutils [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] Acquiring lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.489047] env[68638]: DEBUG oslo_concurrency.lockutils [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] Acquired lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.489047] env[68638]: DEBUG nova.network.neutron [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Refreshing network info cache for port 99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.587750] env[68638]: DEBUG nova.compute.manager [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Received event network-changed-ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 913.587843] env[68638]: DEBUG nova.compute.manager [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Refreshing instance network info cache due to event network-changed-ab92a49b-2fbf-4108-96cb-3a64ba792c4b. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 913.588247] env[68638]: DEBUG oslo_concurrency.lockutils [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] Acquiring lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.588441] env[68638]: DEBUG oslo_concurrency.lockutils [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] Acquired lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.588618] env[68638]: DEBUG nova.network.neutron [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Refreshing network info cache for port ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.661498] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833850, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.782388] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833851, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.796745] env[68638]: DEBUG oslo_concurrency.lockutils [None req-252791f4-8d82-426a-a1c8-684b56a27841 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "63669b15-2ec8-4a0d-b772-6ef7407e8ebf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.398s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.856021] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069402} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.856115] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.857079] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a402118-49ef-45cf-a5ae-13cb795e95b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.882615] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] d2d30008-5058-4be3-b803-00d8ca4450d5/d2d30008-5058-4be3-b803-00d8ca4450d5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.883801] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f820905-66d5-4f02-95ca-29dc358d384b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.903555] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 913.903555] env[68638]: value = "task-2833854" [ 913.903555] env[68638]: _type = "Task" [ 913.903555] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.914458] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833854, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.920816] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833853, 'name': CreateVM_Task, 'duration_secs': 0.404678} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.922258] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 913.925332] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.925504] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.925827] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 913.926384] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df1eacda-4cb3-4ddf-97c6-49d9f5884273 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.934338] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 913.934338] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5279dbf8-36b7-7eeb-8f03-3f0f48f32ddc" [ 913.934338] env[68638]: _type = "Task" [ 913.934338] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.947820] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5279dbf8-36b7-7eeb-8f03-3f0f48f32ddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.171483] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833850, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.286844] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833851, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.313896] env[68638]: DEBUG nova.network.neutron [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updated VIF entry in instance network info cache for port 99d48199-ae21-4f20-8c41-f96a59bcf89b. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.314354] env[68638]: DEBUG nova.network.neutron [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updating instance_info_cache with network_info: [{"id": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "address": "fa:16:3e:e4:a3:b9", "network": {"id": "16e43814-e88b-4088-86c9-adf3cc9f4d54", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1799385003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3f84fb9b3b0442d89b45cc44b0eda16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d6fab536-1e48-4d07-992a-076f0e6d089c", "external-id": "nsx-vlan-transportzone-61", "segmentation_id": 61, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d48199-ae", "ovs_interfaceid": "99d48199-ae21-4f20-8c41-f96a59bcf89b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.415698] env[68638]: DEBUG nova.network.neutron [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updated VIF entry in instance network info cache for port ab92a49b-2fbf-4108-96cb-3a64ba792c4b. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.415977] env[68638]: DEBUG nova.network.neutron [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [{"id": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "address": "fa:16:3e:62:b5:b8", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab92a49b-2f", "ovs_interfaceid": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.421318] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833854, 'name': ReconfigVM_Task, 'duration_secs': 0.334851} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.424775] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Reconfigured VM instance instance-0000004c to attach disk [datastore1] d2d30008-5058-4be3-b803-00d8ca4450d5/d2d30008-5058-4be3-b803-00d8ca4450d5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.426036] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-653536ff-ffe7-4429-a126-ca4151bc30dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.429853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.430328] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.430328] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.430470] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.430631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.435103] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 914.435103] env[68638]: value = "task-2833855" [ 914.435103] env[68638]: _type = "Task" [ 914.435103] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.439713] env[68638]: INFO nova.compute.manager [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Terminating instance [ 914.452418] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "333d88b6-2182-4e9c-9430-058e67921828" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.452698] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.452911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "333d88b6-2182-4e9c-9430-058e67921828-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.453107] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.453284] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.464568] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833855, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.464568] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5279dbf8-36b7-7eeb-8f03-3f0f48f32ddc, 'name': SearchDatastore_Task, 'duration_secs': 0.023868} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.464942] env[68638]: INFO nova.compute.manager [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Terminating instance [ 914.470039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.470576] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.470576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.470576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.471019] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.473570] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17c00ea3-fa0e-471c-97da-5347acfa21f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.490017] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.490017] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.493378] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a545cc63-8562-4ce5-a96b-ac572bad3037 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.496830] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 914.496830] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e80441-e5c4-2d05-8b0b-9cab182ec8d9" [ 914.496830] env[68638]: _type = "Task" [ 914.496830] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.510387] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e80441-e5c4-2d05-8b0b-9cab182ec8d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.665637] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833850, 'name': CreateSnapshot_Task, 'duration_secs': 1.103976} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.666034] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 914.667169] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad410643-d389-41bd-b9f2-08e1d9055641 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.783990] env[68638]: DEBUG oslo_vmware.api [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2833851, 'name': PowerOnVM_Task, 'duration_secs': 1.130464} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.786608] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.786823] env[68638]: INFO nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Took 9.14 seconds to spawn the instance on the hypervisor. 
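The entries above show vCenter operations (ReconfigVM_Task, Rename_Task, SearchDatastore_Task) being started and then polled to completion through oslo.vmware (the wait_for_task / _poll_task frames). Below is a minimal sketch of that start-then-poll pattern, assuming a configured VMwareAPISession; the endpoint, credentials, vm_ref and new_name are placeholders, not values taken from this log.

    from oslo_vmware import api

    # Placeholder endpoint and credentials; a real deployment reads these from nova.conf.
    session = api.VMwareAPISession('vc.example.test', 'svc-user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def rename_vm(session, vm_ref, new_name):
        # Start the vCenter task, then let the session poll it until it finishes,
        # which is what produces the "Task: {...} progress is N%" lines above.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        session.wait_for_task(task)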
[ 914.787015] env[68638]: DEBUG nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.788172] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf1e806-0408-4b1d-bfb9-6f7e51fc9039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.817422] env[68638]: DEBUG oslo_concurrency.lockutils [req-33e27b29-4fe1-4a4e-abbb-1f3910b31680 req-4701d7f1-00e3-4b8d-bb54-b99600d4d45c service nova] Releasing lock "refresh_cache-1bc685aa-4e88-402f-b581-d179706b12a5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.823282] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbf4d3b-3a47-43bf-bfcc-6cfc54104227 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.832023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b429d67-8fb6-414d-9f39-a59fb8c93806 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.864372] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ba718c-63b1-48c1-ac60-cd19db157889 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.872298] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff2599e-263e-4dd8-8773-7e09646ca512 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.886312] env[68638]: DEBUG nova.compute.provider_tree [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.923197] env[68638]: DEBUG oslo_concurrency.lockutils [req-5543ba6a-7f2d-43de-91f2-79b1c0de9a48 req-5e2ac694-4521-4dd0-b931-f78a4c1a5b82 service nova] Releasing lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.950145] env[68638]: DEBUG nova.compute.manager [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.950367] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.950685] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833855, 'name': Rename_Task, 'duration_secs': 0.191215} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.951415] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93692f8-2ecc-4fb7-a167-6cd2939f5a15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.954034] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.954299] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9470cc7b-8792-4ace-8a53-fccaa688c1fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.964654] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 914.964654] env[68638]: value = "task-2833856" [ 914.964654] env[68638]: _type = "Task" [ 914.964654] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.964895] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.965186] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-966b5f5d-c9da-4907-abc8-4022be8a27f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.974947] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833856, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.976118] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 914.976118] env[68638]: value = "task-2833857" [ 914.976118] env[68638]: _type = "Task" [ 914.976118] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.979398] env[68638]: DEBUG nova.compute.manager [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.979602] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.980360] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1703fd0-b595-4ec2-b19f-72bcbe305f8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.988148] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833857, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.990238] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.990498] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6c8bdb0-a0ec-4505-9433-7c27cc99b4a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.997820] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 914.997820] env[68638]: value = "task-2833858" [ 914.997820] env[68638]: _type = "Task" [ 914.997820] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.015168] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833858, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.020502] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e80441-e5c4-2d05-8b0b-9cab182ec8d9, 'name': SearchDatastore_Task, 'duration_secs': 0.012143} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.021701] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6702c49f-71f9-4d81-ab19-faeb63598bbd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.029523] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 915.029523] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52815177-b4f1-4251-e3f1-a9169ceaefa4" [ 915.029523] env[68638]: _type = "Task" [ 915.029523] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.042438] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52815177-b4f1-4251-e3f1-a9169ceaefa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.188935] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 915.189461] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7e9a3dd4-57ee-4c4d-ad64-348ac1e41844 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.199340] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 915.199340] env[68638]: value = "task-2833859" [ 915.199340] env[68638]: _type = "Task" [ 915.199340] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.209634] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833859, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.310280] env[68638]: INFO nova.compute.manager [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Took 43.48 seconds to build instance. 
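The interleaved "Acquiring lock" / "acquired" / "released ... held N.NNNs" entries come from oslo.concurrency's lockutils, which Nova uses to serialize per-instance work such as do_terminate_instance and shared state such as "compute_resources". A rough sketch of the usage pattern; the decorated function is illustrative, with the instance UUID from the log reused only as an example lock name.

    from oslo_concurrency import lockutils

    # Decorator form: serialize work on one instance by using its UUID as the lock name.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('c80895d5-1a59-4779-9da9-9aeec10bc395')
    def do_terminate_instance():
        pass  # runs while the per-instance lock is held

    # Context-manager form, as seen for the "compute_resources" lock:
    with lockutils.lock('compute_resources', 'nova-'):
        pass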
[ 915.388954] env[68638]: DEBUG nova.scheduler.client.report [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.478750] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833856, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.487670] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833857, 'name': PowerOffVM_Task, 'duration_secs': 0.260016} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.487942] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.488139] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.488406] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-885a9fa7-c3bc-4dd3-a6d1-4f65bc28042b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.513926] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833858, 'name': PowerOffVM_Task, 'duration_secs': 0.255014} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.514219] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.514473] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.514737] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-612fad42-79e3-4dd2-b4e6-ef8043e9c927 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.540585] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52815177-b4f1-4251-e3f1-a9169ceaefa4, 'name': SearchDatastore_Task, 'duration_secs': 0.012688} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.540897] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.541413] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.541545] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b61f11f-db96-4408-9c4d-d63833330d14 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.550211] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 915.550211] env[68638]: value = "task-2833862" [ 915.550211] env[68638]: _type = "Task" [ 915.550211] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.561313] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833862, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.576461] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.576807] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.577095] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleting the datastore file [datastore2] c80895d5-1a59-4779-9da9-9aeec10bc395 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.577472] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e313dd9e-91aa-469b-8d9f-6206c2d23792 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.585671] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for the task: (returnval){ [ 915.585671] env[68638]: value = "task-2833863" [ 915.585671] env[68638]: _type = "Task" [ 915.585671] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.596829] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833863, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.598650] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.598899] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.599088] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleting the datastore file [datastore2] 333d88b6-2182-4e9c-9430-058e67921828 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.599380] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a5dbc0d-0789-4f94-af47-e3f7ed905652 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.612523] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for the task: (returnval){ [ 915.612523] env[68638]: value = "task-2833864" [ 915.612523] env[68638]: _type = "Task" [ 915.612523] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.621703] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833864, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.710523] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833859, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.812837] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d52ed421-b968-4918-b1e4-80fd5bc8f7f0 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.945s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.895102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.895859] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 915.899646] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.041s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.899893] env[68638]: DEBUG nova.objects.instance [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 915.980646] env[68638]: DEBUG oslo_vmware.api [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833856, 'name': PowerOnVM_Task, 'duration_secs': 0.919818} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.981121] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.981403] env[68638]: INFO nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Took 7.56 seconds to spawn the instance on the hypervisor. 
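After a PowerOnVM_Task completes, the driver re-reads VM properties (the PropertyCollector.RetrievePropertiesEx calls above) to confirm the power state. A hedged sketch of such a property read through oslo.vmware follows; get_power_state is an illustrative helper here, not the Nova method itself.

    from oslo_vmware import vim_util

    def get_power_state(session, vm_ref):
        # Reads runtime.powerState ('poweredOn' / 'poweredOff' / 'suspended')
        # for the given VM managed-object reference.
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')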
[ 915.981592] env[68638]: DEBUG nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.982795] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf35b468-80e4-405c-928e-24f778437d37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.061109] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833862, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.100121] env[68638]: DEBUG oslo_vmware.api [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Task: {'id': task-2833863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197454} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.100121] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.100121] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.100121] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.100121] env[68638]: INFO nova.compute.manager [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Took 1.15 seconds to destroy the instance on the hypervisor. [ 916.100121] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.100667] env[68638]: DEBUG nova.compute.manager [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.100667] env[68638]: DEBUG nova.network.neutron [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.122728] env[68638]: DEBUG oslo_vmware.api [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Task: {'id': task-2833864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231354} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.122976] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.123225] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.123406] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.123573] env[68638]: INFO nova.compute.manager [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 1.14 seconds to destroy the instance on the hypervisor. [ 916.123806] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.124534] env[68638]: DEBUG nova.compute.manager [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.124534] env[68638]: DEBUG nova.network.neutron [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.209540] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833859, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.410460] env[68638]: DEBUG nova.compute.utils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.415254] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.415254] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.422491] env[68638]: DEBUG nova.compute.manager [req-53ccbd11-a1da-4f3c-8c8f-605e75baeb16 req-35e341c9-9df8-42a0-bf02-19eb4b5d3662 service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Received event network-vif-deleted-da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 916.422491] env[68638]: INFO nova.compute.manager [req-53ccbd11-a1da-4f3c-8c8f-605e75baeb16 req-35e341c9-9df8-42a0-bf02-19eb4b5d3662 service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Neutron deleted interface da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb; detaching it from the instance and deleting it from the info cache [ 916.422491] env[68638]: DEBUG nova.network.neutron [req-53ccbd11-a1da-4f3c-8c8f-605e75baeb16 req-35e341c9-9df8-42a0-bf02-19eb4b5d3662 service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.478472] env[68638]: DEBUG nova.policy [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ad3d2f42f47497789f24f65dcf85f9a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0e49266268a4fda9ac23822bb1436a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.510593] env[68638]: INFO nova.compute.manager [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Took 41.66 seconds to build instance. 
[ 916.530446] env[68638]: DEBUG nova.compute.manager [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Received event network-changed-a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 916.530644] env[68638]: DEBUG nova.compute.manager [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Refreshing instance network info cache due to event network-changed-a087b668-2b77-40e4-8a37-af9d56aa8b57. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 916.531083] env[68638]: DEBUG oslo_concurrency.lockutils [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] Acquiring lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.531083] env[68638]: DEBUG oslo_concurrency.lockutils [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] Acquired lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.532873] env[68638]: DEBUG nova.network.neutron [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Refreshing network info cache for port a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.561633] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833862, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627917} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.561857] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.562096] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.562339] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3001f4ed-180c-4002-9711-cb5587d2ca7a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.571355] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 916.571355] env[68638]: value = "task-2833865" [ 916.571355] env[68638]: _type = "Task" [ 916.571355] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.582187] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833865, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.712743] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833859, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.888883] env[68638]: DEBUG nova.network.neutron [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.912595] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 916.917485] env[68638]: DEBUG oslo_concurrency.lockutils [None req-db4259a1-a586-45d2-ad48-275f279af6d9 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.918540] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.401s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.918774] env[68638]: DEBUG nova.objects.instance [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 916.926522] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f50e0e44-ed85-4c2a-a2ad-f0cd1eb2e6b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.930112] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Successfully created port: b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 916.940884] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b28fb3-eb7c-44bc-b84a-8fec52fa96c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.992435] env[68638]: DEBUG nova.compute.manager [req-53ccbd11-a1da-4f3c-8c8f-605e75baeb16 req-35e341c9-9df8-42a0-bf02-19eb4b5d3662 service nova] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Detach interface failed, port_id=da2caa54-8dc8-4b47-b0d9-9c9f5e6c86cb, reason: Instance 333d88b6-2182-4e9c-9430-058e67921828 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 917.016174] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d968f688-6887-4b25-bb5c-bfd583e8bf1b tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.922s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.089474] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833865, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.245646} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.089742] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.090498] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc59a6fc-ea90-4ccb-8a53-4eab643027d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.128920] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.132909] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-153c1222-7603-43a9-b778-3503ffd293d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.158221] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 917.158221] env[68638]: value = "task-2833866" [ 917.158221] env[68638]: _type = "Task" [ 917.158221] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.167679] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833866, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.213626] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833859, 'name': CloneVM_Task, 'duration_secs': 1.81429} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.214385] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Created linked-clone VM from snapshot [ 917.215336] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a5a869-166f-42be-95de-6ba2b2620ccf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.228570] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Uploading image 54968b0f-571c-4b4c-be55-3b4b458fd6b8 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 917.253572] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 917.253572] env[68638]: value = "vm-569946" [ 917.253572] env[68638]: _type = "VirtualMachine" [ 917.253572] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 917.253852] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-851ea21a-2a4a-4c07-9854-a497faa2045d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.261668] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease: (returnval){ [ 917.261668] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a6de86-c5ac-4911-6427-3123072590b3" [ 917.261668] env[68638]: _type = "HttpNfcLease" [ 917.261668] env[68638]: } obtained for exporting VM: (result){ [ 917.261668] env[68638]: value = "vm-569946" [ 917.261668] env[68638]: _type = "VirtualMachine" [ 917.261668] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 917.262025] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the lease: (returnval){ [ 917.262025] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a6de86-c5ac-4911-6427-3123072590b3" [ 917.262025] env[68638]: _type = "HttpNfcLease" [ 917.262025] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 917.270375] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 917.270375] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a6de86-c5ac-4911-6427-3123072590b3" [ 917.270375] env[68638]: _type = "HttpNfcLease" [ 917.270375] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 917.300366] env[68638]: DEBUG nova.network.neutron [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.358861] env[68638]: DEBUG nova.network.neutron [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updated VIF entry in instance network info cache for port a087b668-2b77-40e4-8a37-af9d56aa8b57. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.359242] env[68638]: DEBUG nova.network.neutron [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updating instance_info_cache with network_info: [{"id": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "address": "fa:16:3e:04:7f:c9", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa087b668-2b", "ovs_interfaceid": "a087b668-2b77-40e4-8a37-af9d56aa8b57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.392327] env[68638]: INFO nova.compute.manager [-] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Took 1.27 seconds to deallocate network for instance. 
[ 917.600048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "d2d30008-5058-4be3-b803-00d8ca4450d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.600048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.600445] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.600703] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.601043] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.603058] env[68638]: INFO nova.compute.manager [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Terminating instance [ 917.671544] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.771553] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 917.771553] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a6de86-c5ac-4911-6427-3123072590b3" [ 917.771553] env[68638]: _type = "HttpNfcLease" [ 917.771553] env[68638]: } is ready. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 917.771911] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 917.771911] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a6de86-c5ac-4911-6427-3123072590b3" [ 917.771911] env[68638]: _type = "HttpNfcLease" [ 917.771911] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 917.772714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18269b16-4176-4f86-be37-66292d916911 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.781284] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 917.781560] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 917.838932] env[68638]: INFO nova.compute.manager [-] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Took 1.74 seconds to deallocate network for instance. [ 917.863467] env[68638]: DEBUG oslo_concurrency.lockutils [req-bd3de078-f824-4d77-9457-1abd4910c272 req-b788d935-d2b4-4106-9830-ef27e1181573 service nova] Releasing lock "refresh_cache-53e92f51-9010-4fb2-89e1-9d16a252ef6e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.904846] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.934754] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e8162ba-0b4a-4769-80a4-48a24d86c731 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.941815] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 917.944579] env[68638]: DEBUG oslo_concurrency.lockutils [None req-79886045-1b18-4f13-9599-facc6cd4b97a tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.947950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.595s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.947950] env[68638]: DEBUG nova.objects.instance [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lazy-loading 'resources' on Instance uuid c07f6e3a-86cf-4584-aa5e-5adc4bf086e3 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.977648] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 917.977906] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 917.978080] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 917.978268] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 917.978416] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 917.978562] env[68638]: DEBUG 
nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 917.979452] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 917.979452] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 917.979452] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 917.979452] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 917.979452] env[68638]: DEBUG nova.virt.hardware [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 917.980865] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6ee37a-4ada-49b3-9caf-607dbb2940ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.989918] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dc408a-4108-4a03-afc8-25ff916301e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.109135] env[68638]: DEBUG nova.compute.manager [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 918.110542] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.110542] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa82022-0c17-4483-85be-abde9bebdb06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.119764] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.119852] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8b155ec-e6d9-445f-8baa-43a28e2e4887 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.128094] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 918.128094] env[68638]: value = "task-2833868" [ 918.128094] env[68638]: _type = "Task" [ 918.128094] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.138333] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833868, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.174666] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833866, 'name': ReconfigVM_Task, 'duration_secs': 0.707138} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.175132] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.176223] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71b32ac0-86b0-4951-ba3d-6a5d3ab5aafa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.185014] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 918.185014] env[68638]: value = "task-2833869" [ 918.185014] env[68638]: _type = "Task" [ 918.185014] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.198085] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833869, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.347403] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.487894] env[68638]: DEBUG nova.compute.manager [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Received event network-vif-plugged-b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 918.488169] env[68638]: DEBUG oslo_concurrency.lockutils [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] Acquiring lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.488620] env[68638]: DEBUG oslo_concurrency.lockutils [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.488861] env[68638]: DEBUG oslo_concurrency.lockutils [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.489123] env[68638]: DEBUG nova.compute.manager [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] No waiting events found dispatching network-vif-plugged-b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 918.489374] env[68638]: WARNING nova.compute.manager [req-2b816645-4449-48ac-8dbc-c5f3ce3b96ac req-437d44d3-63fd-4b75-ac69-89b5660b4d9a service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Received unexpected event network-vif-plugged-b541496e-247e-4bbb-bed9-6e9a9aa2a91f for instance with vm_state building and task_state spawning. [ 918.560052] env[68638]: DEBUG nova.compute.manager [req-d2943b22-3b9c-42b7-88a0-42fa4030f96d req-7fc3e03d-1d2b-47d9-a289-6ed0f3806153 service nova] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Received event network-vif-deleted-d9c26596-0dec-45f8-9efd-781be344a670 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 918.612149] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Successfully updated port: b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 918.649608] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833868, 'name': PowerOffVM_Task, 'duration_secs': 0.351895} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.650367] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.650367] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.650648] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-427d5ff8-d88b-45e6-962f-7db0bc6113c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.697318] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833869, 'name': Rename_Task, 'duration_secs': 0.24081} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.697617] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.697901] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c39df2e9-73b8-45b8-91d9-a11abc35667e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.714831] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 918.714831] env[68638]: value = "task-2833871" [ 918.714831] env[68638]: _type = "Task" [ 918.714831] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.724361] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.746110] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 918.746477] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 918.746832] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore1] d2d30008-5058-4be3-b803-00d8ca4450d5 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 918.747206] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9890c8ae-82c0-4397-a30d-cd5d189b9fb0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.760535] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 918.760535] env[68638]: value = "task-2833872" [ 918.760535] env[68638]: _type = "Task" [ 918.760535] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.773134] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.968238] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5e62aa-f1bb-4981-89ea-53cc629af736 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.976859] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad86004-b8b7-4709-ae84-3de06fa3351a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.010655] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ab7e6d-4e78-489c-8e94-bbee145b8f83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.020461] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba370b7-ab9c-4fc3-8fb5-09ed477ca682 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.035472] env[68638]: DEBUG nova.compute.provider_tree [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.122311] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.122311] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquired lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.122311] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.227467] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833871, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.272424] env[68638]: DEBUG oslo_vmware.api [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2833872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26257} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.272741] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.272935] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.273228] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.273516] env[68638]: INFO nova.compute.manager [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 919.273887] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 919.274126] env[68638]: DEBUG nova.compute.manager [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 919.274126] env[68638]: DEBUG nova.network.neutron [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.539140] env[68638]: DEBUG nova.scheduler.client.report [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.655038] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.726695] env[68638]: DEBUG oslo_vmware.api [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833871, 'name': PowerOnVM_Task, 'duration_secs': 0.677345} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.727037] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.727259] env[68638]: INFO nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Took 8.67 seconds to spawn the instance on the hypervisor. 
[ 919.727443] env[68638]: DEBUG nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 919.728246] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86adb80-9543-4488-9bb7-cd7391895301 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.825586] env[68638]: DEBUG nova.network.neutron [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updating instance_info_cache with network_info: [{"id": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "address": "fa:16:3e:76:5e:4b", "network": {"id": "ff60caea-8ade-4d28-940c-4b1787dfff4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1741337158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0e49266268a4fda9ac23822bb1436a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb541496e-24", "ovs_interfaceid": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.019599] env[68638]: DEBUG nova.network.neutron [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.044377] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.046972] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.590s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.047228] env[68638]: DEBUG nova.objects.instance [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lazy-loading 'resources' on Instance uuid d49fdd3f-3ad6-4396-811f-67f1ef1f2940 
{{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.070701] env[68638]: INFO nova.scheduler.client.report [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Deleted allocations for instance c07f6e3a-86cf-4584-aa5e-5adc4bf086e3 [ 920.246835] env[68638]: INFO nova.compute.manager [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Took 44.31 seconds to build instance. [ 920.328425] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Releasing lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.328787] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Instance network_info: |[{"id": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "address": "fa:16:3e:76:5e:4b", "network": {"id": "ff60caea-8ade-4d28-940c-4b1787dfff4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1741337158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0e49266268a4fda9ac23822bb1436a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb541496e-24", "ovs_interfaceid": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 920.329387] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:5e:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7b4bfde-f109-4f64-adab-e7f06b80685d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b541496e-247e-4bbb-bed9-6e9a9aa2a91f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 920.339432] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Creating folder: Project (b0e49266268a4fda9ac23822bb1436a8). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.340228] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e352960-9584-48f1-81c4-31b1f7b5afc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.356800] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Created folder: Project (b0e49266268a4fda9ac23822bb1436a8) in parent group-v569734. [ 920.357043] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Creating folder: Instances. Parent ref: group-v569947. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 920.357335] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f130404-6f97-4fb4-9007-c7dba9b4aa6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.371282] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Created folder: Instances in parent group-v569947. [ 920.371709] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.371709] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 920.371858] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0888493b-1346-49d9-8330-f97be9cf5828 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.397918] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 920.397918] env[68638]: value = "task-2833875" [ 920.397918] env[68638]: _type = "Task" [ 920.397918] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.407640] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833875, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.523042] env[68638]: INFO nova.compute.manager [-] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Took 1.25 seconds to deallocate network for instance. 
[ 920.579752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2fd383b4-4408-4d55-aee5-429f3a331557 tempest-ServersListShow298Test-732857702 tempest-ServersListShow298Test-732857702-project-member] Lock "c07f6e3a-86cf-4584-aa5e-5adc4bf086e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.468s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.596116] env[68638]: DEBUG nova.compute.manager [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Received event network-changed-b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 920.596116] env[68638]: DEBUG nova.compute.manager [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Refreshing instance network info cache due to event network-changed-b541496e-247e-4bbb-bed9-6e9a9aa2a91f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 920.596116] env[68638]: DEBUG oslo_concurrency.lockutils [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] Acquiring lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.596287] env[68638]: DEBUG oslo_concurrency.lockutils [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] Acquired lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.597029] env[68638]: DEBUG nova.network.neutron [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Refreshing network info cache for port b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.749137] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8d0d0711-3410-4c1c-a3fb-42bd85e56d71 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.330s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.803125] env[68638]: INFO nova.compute.manager [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Rebuilding instance [ 920.864371] env[68638]: DEBUG nova.compute.manager [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.865726] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59a2b4d-70e1-41cf-870b-fa7b27976308 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.910277] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833875, 'name': CreateVM_Task, 'duration_secs': 0.454529} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.911233] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 920.911947] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.912121] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.912469] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 920.913012] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3618f30d-6161-42e7-a8dc-4d701fe4c5a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.919073] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 920.919073] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a3bcc0-07bf-c59a-c478-94e6a146cfca" [ 920.919073] env[68638]: _type = "Task" [ 920.919073] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.930373] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a3bcc0-07bf-c59a-c478-94e6a146cfca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.005510] env[68638]: INFO nova.compute.manager [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Rescuing [ 921.005772] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.005928] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.006121] env[68638]: DEBUG nova.network.neutron [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.020287] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c49ef75-2b8f-4856-884c-c06330990bac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.030113] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.031340] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080c8814-900d-497c-bee9-ac7afbde9285 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.064054] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7251b1c2-6e69-447c-ad4e-598296f4913e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.072971] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66fe43c-3cd9-4d8e-b510-fc8b54c373f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.090776] env[68638]: DEBUG nova.compute.provider_tree [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 921.432828] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': 
session[5267461d-1849-2a3b-78fe-5543790e1404]52a3bcc0-07bf-c59a-c478-94e6a146cfca, 'name': SearchDatastore_Task, 'duration_secs': 0.024107} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.432828] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.432828] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 921.432955] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.433284] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.433371] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 921.433652] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6df5752c-8c2f-4cc9-b4a1-b29619900148 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.461054] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.461307] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 921.462114] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-568ca209-cd1a-4fe1-9113-1b09e5d7ba2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.470047] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 921.470047] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cbed71-0737-b5c0-b8b6-e1c5b4f98903" [ 921.470047] env[68638]: _type = "Task" [ 921.470047] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.481023] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbed71-0737-b5c0-b8b6-e1c5b4f98903, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.594444] env[68638]: DEBUG nova.scheduler.client.report [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.605528] env[68638]: DEBUG nova.network.neutron [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updated VIF entry in instance network info cache for port b541496e-247e-4bbb-bed9-6e9a9aa2a91f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.605891] env[68638]: DEBUG nova.network.neutron [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updating instance_info_cache with network_info: [{"id": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "address": "fa:16:3e:76:5e:4b", "network": {"id": "ff60caea-8ade-4d28-940c-4b1787dfff4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1741337158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0e49266268a4fda9ac23822bb1436a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb541496e-24", "ovs_interfaceid": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.752595] env[68638]: DEBUG nova.network.neutron [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [{"id": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "address": "fa:16:3e:62:b5:b8", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab92a49b-2f", "ovs_interfaceid": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.883488] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.884153] env[68638]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81aae8fb-6b6c-4d18-9bae-d522e5d32dcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.896032] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 921.896032] env[68638]: value = "task-2833876" [ 921.896032] env[68638]: _type = "Task" [ 921.896032] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.907766] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833876, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.980695] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbed71-0737-b5c0-b8b6-e1c5b4f98903, 'name': SearchDatastore_Task, 'duration_secs': 0.016487} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.981562] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b3e03fe-539d-4c40-b0a6-ab96c8874fe3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.989848] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 921.989848] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522bdfeb-be18-643a-847c-831b9e56ce48" [ 921.989848] env[68638]: _type = "Task" [ 921.989848] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.000561] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522bdfeb-be18-643a-847c-831b9e56ce48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.099623] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.052s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.102706] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.038s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.103075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.105062] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.637s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.105322] env[68638]: DEBUG nova.objects.instance [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 922.109029] env[68638]: DEBUG oslo_concurrency.lockutils [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] Releasing lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.109164] env[68638]: DEBUG nova.compute.manager [req-341d3d7b-5b1d-43ec-8479-523d39845381 req-630cab3c-f94e-4431-8ec8-b906fadaa9f1 service nova] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Received event network-vif-deleted-c6b422b3-9642-4a7f-a4b7-848cbba4f147 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 922.128988] env[68638]: INFO nova.scheduler.client.report [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleted allocations for instance a09c4492-34fd-4010-b547-bfb5b61f252d [ 922.130815] env[68638]: INFO nova.scheduler.client.report [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleted allocations for instance d49fdd3f-3ad6-4396-811f-67f1ef1f2940 [ 922.257618] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 
tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.407274] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833876, 'name': PowerOffVM_Task, 'duration_secs': 0.23043} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.407683] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.408492] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.408826] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dd53984-63f0-4c7f-a04a-050bebfbda2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.418572] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 922.418572] env[68638]: value = "task-2833877" [ 922.418572] env[68638]: _type = "Task" [ 922.418572] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.431880] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 922.432128] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 922.432377] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569870', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'name': 'volume-4dc574c0-0283-4f21-ac01-f714b10306da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02894a47-59b1-475b-b934-c8d0b6dabc5b', 'attached_at': '', 'detached_at': '', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'serial': '4dc574c0-0283-4f21-ac01-f714b10306da'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 922.433223] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd281dc4-4516-4eef-8ec8-8373a69c751b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.455523] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5ac338-74ba-434b-8a57-c96ad4cf1ecf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.463924] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65b0a37-70df-471b-9c01-6e5720f306e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.483755] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c09d645c-b2b5-400a-a538-f98c05da859e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.505639] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] The volume has not been displaced from its original location: [datastore2] volume-4dc574c0-0283-4f21-ac01-f714b10306da/volume-4dc574c0-0283-4f21-ac01-f714b10306da.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 922.511868] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Reconfiguring VM instance instance-0000003e to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 922.512841] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0ae74f5-2926-4bdc-9e74-cd77bed50746 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.530595] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522bdfeb-be18-643a-847c-831b9e56ce48, 'name': SearchDatastore_Task, 'duration_secs': 0.016311} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.531402] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.531739] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 30193a76-a391-4a64-98cc-7e22dcf7218c/30193a76-a391-4a64-98cc-7e22dcf7218c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 922.532056] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e39d9e30-98f0-41aa-9d09-cce443d97d6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.538451] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 922.538451] env[68638]: value = "task-2833878" [ 922.538451] env[68638]: _type = "Task" [ 922.538451] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.543423] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 922.543423] env[68638]: value = "task-2833879" [ 922.543423] env[68638]: _type = "Task" [ 922.543423] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.550600] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833878, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.557195] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833879, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.640790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a90e1a5e-b321-4f49-803f-c23da36ab3b3 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "a09c4492-34fd-4010-b547-bfb5b61f252d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.531s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.642557] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ea0cc71f-3a32-4bb7-9c53-653313ca803a tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "d49fdd3f-3ad6-4396-811f-67f1ef1f2940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.780s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.053216] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833878, 'name': ReconfigVM_Task, 'duration_secs': 0.214135} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.053811] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Reconfigured VM instance instance-0000003e to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 923.068217] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37f63580-bb83-4635-b965-66286e8ccf7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.087480] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833879, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5139} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.087799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 30193a76-a391-4a64-98cc-7e22dcf7218c/30193a76-a391-4a64-98cc-7e22dcf7218c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 923.088151] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 923.088850] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c54d1a85-2796-4a31-a0a7-90a2e964fe4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.094905] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 923.094905] env[68638]: value = "task-2833880" [ 923.094905] env[68638]: _type = "Task" [ 923.094905] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.099643] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 923.099643] env[68638]: value = "task-2833881" [ 923.099643] env[68638]: _type = "Task" [ 923.099643] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.109579] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.116767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-75aedbdc-9ed2-4fe8-bfac-83a2c5de0ee2 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.118101] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833881, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.122024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.381s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.122024] env[68638]: INFO nova.compute.claims [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.614955] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833880, 'name': ReconfigVM_Task, 'duration_secs': 0.226445} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.620152] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569870', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'name': 'volume-4dc574c0-0283-4f21-ac01-f714b10306da', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '02894a47-59b1-475b-b934-c8d0b6dabc5b', 'attached_at': '', 'detached_at': '', 'volume_id': '4dc574c0-0283-4f21-ac01-f714b10306da', 'serial': '4dc574c0-0283-4f21-ac01-f714b10306da'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 923.620152] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.620775] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090874} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.622662] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e96fca0-4c8c-427c-bbff-ad00d1842f67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.629500] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 923.632558] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4adeaa-b8d5-4598-b175-841ef265fc27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.644591] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.662195] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-877fc4a7-ad07-458a-a280-4f38fae3a10f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.676310] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 30193a76-a391-4a64-98cc-7e22dcf7218c/30193a76-a391-4a64-98cc-7e22dcf7218c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 923.677609] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0014187-9326-4b59-a81b-6a05c7db5f32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.703022] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 923.703022] env[68638]: value = "task-2833883" [ 923.703022] env[68638]: _type = "Task" [ 923.703022] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.710679] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833883, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.768319] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.768728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.768999] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Deleting the datastore file [datastore2] 02894a47-59b1-475b-b934-c8d0b6dabc5b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.769353] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62c696bf-5a00-48e9-a46c-e6def9de5a0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.777875] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for the task: (returnval){ [ 923.777875] env[68638]: value = "task-2833884" [ 923.777875] env[68638]: _type = "Task" [ 923.777875] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.791108] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.798034] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.798034] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b699669-4376-4188-bcae-dccaf182b737 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.805991] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 923.805991] env[68638]: value = "task-2833885" [ 923.805991] env[68638]: _type = "Task" [ 923.805991] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.817054] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.178281] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.178609] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.178871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.179050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.179219] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.184791] env[68638]: INFO nova.compute.manager [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Terminating instance [ 924.220054] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833883, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.298499] env[68638]: DEBUG oslo_vmware.api [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Task: {'id': task-2833884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184631} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.298976] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.298976] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.299186] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.323361] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833885, 'name': PowerOffVM_Task, 'duration_secs': 0.422377} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.323640] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.324493] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3b8b74-2171-4422-94bd-1a7cfe49795c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.349652] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00af2bd2-07ca-43b6-a619-74e5ea50d0aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.405621] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.405951] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d78ae55f-8d6a-4e05-8920-3a0032769db5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.415895] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 924.415895] env[68638]: value = "task-2833886" [ 924.415895] env[68638]: _type = "Task" [ 924.415895] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.424125] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 924.424784] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df043b44-f5c2-41cf-9aae-3ced1cb1ab87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.440155] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 924.440475] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.440817] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.441050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.441324] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.442074] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b8cbbf8-6936-4729-aa2d-b542abb66e9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.449117] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4836ae6b-59de-4b7e-bbdc-022430f53c09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.477225] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.477225] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.477225] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66757c0b-3033-4e87-8641-0951b444109a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.507454] env[68638]: ERROR nova.compute.manager [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Failed to detach volume 4dc574c0-0283-4f21-ac01-f714b10306da from /dev/sda: nova.exception.InstanceNotFound: Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b could not be found. [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Traceback (most recent call last): [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self.driver.rebuild(**kwargs) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise NotImplementedError() [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] NotImplementedError [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] During handling of the above exception, another exception occurred: [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Traceback (most recent call last): [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self.driver.detach_volume(context, old_connection_info, [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] return self._volumeops.detach_volume(connection_info, instance) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._detach_volume_vmdk(connection_info, instance) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] vm_ref = vm_util.get_vm_ref(self._session, 
instance) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] stable_ref.fetch_moref(session) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise exception.InstanceNotFound(instance_id=self._uuid) [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] nova.exception.InstanceNotFound: Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b could not be found. [ 924.507454] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.512823] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 924.512823] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d38635-4466-c95a-a761-f0eaccfb6a2b" [ 924.512823] env[68638]: _type = "Task" [ 924.512823] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.523324] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d38635-4466-c95a-a761-f0eaccfb6a2b, 'name': SearchDatastore_Task, 'duration_secs': 0.012483} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.524179] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdbf0805-ee46-4819-bc2a-0089bf3993e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.534522] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 924.534522] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52638060-d166-b02d-7de1-cfff1cbd8710" [ 924.534522] env[68638]: _type = "Task" [ 924.534522] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.543612] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52638060-d166-b02d-7de1-cfff1cbd8710, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.693681] env[68638]: DEBUG nova.compute.manager [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.694510] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.695088] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c544a7-6418-44dc-bc63-2b8852c85553 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.704209] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.706038] env[68638]: DEBUG nova.compute.utils [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Build of instance 02894a47-59b1-475b-b934-c8d0b6dabc5b aborted: Failed to rebuild volume backed instance. {{(pid=68638) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 924.710197] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79b8a175-db61-4061-8240-f60bcc0dd8e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.713252] env[68638]: ERROR nova.compute.manager [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 02894a47-59b1-475b-b934-c8d0b6dabc5b aborted: Failed to rebuild volume backed instance. 
[ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Traceback (most recent call last): [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self.driver.rebuild(**kwargs) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise NotImplementedError() [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] NotImplementedError [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] During handling of the above exception, another exception occurred: [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Traceback (most recent call last): [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._detach_root_volume(context, instance, root_bdm) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] with excutils.save_and_reraise_exception(): [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self.force_reraise() [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise self.value [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self.driver.detach_volume(context, old_connection_info, [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] return self._volumeops.detach_volume(connection_info, instance) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._detach_volume_vmdk(connection_info, instance) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] stable_ref.fetch_moref(session) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise exception.InstanceNotFound(instance_id=self._uuid) [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] nova.exception.InstanceNotFound: Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b could not be found. [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] During handling of the above exception, another exception occurred: [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Traceback (most recent call last): [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 11390, in _error_out_instance_on_exception [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] yield [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 924.713252] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._do_rebuild_instance_with_claim( [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._do_rebuild_instance( [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._rebuild_default_impl(**kwargs) [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] self._rebuild_volume_backed_instance( [ 
924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] raise exception.BuildAbortException( [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] nova.exception.BuildAbortException: Build of instance 02894a47-59b1-475b-b934-c8d0b6dabc5b aborted: Failed to rebuild volume backed instance. [ 924.714681] env[68638]: ERROR nova.compute.manager [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] [ 924.724805] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833883, 'name': ReconfigVM_Task, 'duration_secs': 0.570022} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.726602] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 30193a76-a391-4a64-98cc-7e22dcf7218c/30193a76-a391-4a64-98cc-7e22dcf7218c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 924.727379] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 924.727379] env[68638]: value = "task-2833887" [ 924.727379] env[68638]: _type = "Task" [ 924.727379] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.728143] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c149db09-d9f8-4128-b40f-55043d9f5616 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.737536] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d39335-8042-41e6-bb51-b8c109aac9f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.744082] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 924.744082] env[68638]: value = "task-2833888" [ 924.744082] env[68638]: _type = "Task" [ 924.744082] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.751095] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833887, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.755209] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb15d22-f6ec-45e3-9262-36b933bd951a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.762562] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833888, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.796601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63c128d-8528-4017-bf75-a72c76978e38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.807579] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ed909e-bad8-4493-a426-e1284f0a2ccb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.830138] env[68638]: DEBUG nova.compute.provider_tree [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.045948] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52638060-d166-b02d-7de1-cfff1cbd8710, 'name': SearchDatastore_Task, 'duration_secs': 0.011945} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.046316] env[68638]: DEBUG oslo_concurrency.lockutils [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.046683] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. 
{{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 925.046997] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53305de1-3efb-4a6c-9ffd-36c523f6006b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.056054] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 925.056054] env[68638]: value = "task-2833889" [ 925.056054] env[68638]: _type = "Task" [ 925.056054] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.066224] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.075849] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "809416da-af6c-429d-b4b2-5334768aa744" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.076449] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.076449] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "809416da-af6c-429d-b4b2-5334768aa744-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.076696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.076696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.078866] env[68638]: INFO nova.compute.manager [None 
req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Terminating instance [ 925.241303] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833887, 'name': PowerOffVM_Task, 'duration_secs': 0.248308} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.241565] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.241736] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.244022] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-395182aa-cce1-4ac1-b202-3f430437d8df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.256230] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833888, 'name': Rename_Task, 'duration_secs': 0.184469} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.256230] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 925.256703] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d3654af-8f4e-482d-b710-f83e6a2348a3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.265720] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 925.265720] env[68638]: value = "task-2833891" [ 925.265720] env[68638]: _type = "Task" [ 925.265720] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.338021] env[68638]: DEBUG nova.scheduler.client.report [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.345024] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.345024] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.345024] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleting the datastore file [datastore2] 7617a7b1-3b21-4d38-b090-1d35bc74637b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.345024] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec64d0ce-847e-4880-9aa8-704f0b063284 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.354938] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for the task: (returnval){ [ 925.354938] env[68638]: value = "task-2833892" [ 925.354938] env[68638]: _type = "Task" [ 925.354938] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.369146] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.569235] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833889, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.584591] env[68638]: DEBUG nova.compute.manager [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.585578] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.586050] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f2e648-1782-4fae-8428-b566d1ec50a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.596402] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.597030] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d496854-3969-4263-adf8-ba472dd8ebd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.608343] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 925.608343] env[68638]: value = "task-2833893" [ 925.608343] env[68638]: _type = "Task" [ 925.608343] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.623733] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833893, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.780574] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833891, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.845149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.847369] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 925.849734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.697s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.850392] env[68638]: DEBUG nova.objects.instance [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lazy-loading 'resources' on Instance uuid 92c90438-f7cc-4a48-bfac-f7912709cf88 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.874476] env[68638]: DEBUG oslo_vmware.api [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Task: {'id': task-2833892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.447476} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.875170] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.875622] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.875940] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.876244] env[68638]: INFO nova.compute.manager [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 925.876799] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 925.877209] env[68638]: DEBUG nova.compute.manager [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 925.877483] env[68638]: DEBUG nova.network.neutron [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.070290] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.734753} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.070620] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. [ 926.071555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e35337c-91b0-43f2-94e4-400e8d624781 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.106745] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.111015] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5296ab2e-5ffb-4027-8270-cfce4c30ddf7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.133055] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833893, 'name': PowerOffVM_Task, 'duration_secs': 0.388725} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.135828] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.135828] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 926.136300] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 926.136300] env[68638]: value = "task-2833894" [ 926.136300] env[68638]: _type = "Task" [ 926.136300] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.136548] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-347aa1f9-4565-4bf0-a996-a674da6f335b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.149961] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833894, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.218983] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.219212] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.219405] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleting the datastore file [datastore2] 809416da-af6c-429d-b4b2-5334768aa744 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.219705] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-923df887-e5b9-488f-8cea-d09c0eb0d743 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.232040] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for the task: (returnval){ [ 926.232040] env[68638]: value = "task-2833896" [ 926.232040] env[68638]: _type = "Task" [ 926.232040] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.247626] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833896, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.264149] env[68638]: DEBUG nova.compute.manager [req-44c5d85b-99dd-45d7-abc7-b6519edefcf6 req-3687769b-ec35-43a6-873a-28c89d126cb1 service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Received event network-vif-deleted-41ce015b-dfb7-4031-a11b-8dfd0e29bb62 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 926.264149] env[68638]: INFO nova.compute.manager [req-44c5d85b-99dd-45d7-abc7-b6519edefcf6 req-3687769b-ec35-43a6-873a-28c89d126cb1 service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Neutron deleted interface 41ce015b-dfb7-4031-a11b-8dfd0e29bb62; detaching it from the instance and deleting it from the info cache [ 926.264246] env[68638]: DEBUG nova.network.neutron [req-44c5d85b-99dd-45d7-abc7-b6519edefcf6 req-3687769b-ec35-43a6-873a-28c89d126cb1 service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.281039] env[68638]: DEBUG oslo_vmware.api [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833891, 'name': PowerOnVM_Task, 'duration_secs': 0.587769} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.282180] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.282469] env[68638]: INFO nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Took 8.34 seconds to spawn the instance on the hypervisor. [ 926.282712] env[68638]: DEBUG nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 926.284730] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20ae8fc-8d3f-4e84-a894-b881fd281164 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.359685] env[68638]: DEBUG nova.compute.utils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.362264] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.362538] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.420755] env[68638]: DEBUG nova.policy [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85f963c3ed1444c49a8e482ac59cc0f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6565edffde9f43819387f38c2c375823', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.653182] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833894, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.734778] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.743865] env[68638]: DEBUG nova.network.neutron [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.753803] env[68638]: DEBUG oslo_vmware.api [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Task: {'id': task-2833896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154129} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.754093] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.754279] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.754503] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.754739] env[68638]: INFO nova.compute.manager [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Took 1.17 seconds to destroy the instance on the hypervisor. [ 926.755400] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.755595] env[68638]: DEBUG nova.compute.manager [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.755711] env[68638]: DEBUG nova.network.neutron [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.768694] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3412e51-81c4-418b-92c3-aeeac2ebe00c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.783680] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe4da1b-9b3f-4ec0-aadf-b1eed416641c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.800898] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Successfully created port: f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 926.817333] env[68638]: INFO nova.compute.manager [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Took 42.69 seconds to build instance. [ 926.841164] env[68638]: DEBUG nova.compute.manager [req-44c5d85b-99dd-45d7-abc7-b6519edefcf6 req-3687769b-ec35-43a6-873a-28c89d126cb1 service nova] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Detach interface failed, port_id=41ce015b-dfb7-4031-a11b-8dfd0e29bb62, reason: Instance 7617a7b1-3b21-4d38-b090-1d35bc74637b could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 926.855692] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 926.856936] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296186bd-c6e8-4144-985f-525b9cdc3e86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.867979] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk is in state: ready. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 926.868191] env[68638]: ERROR oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk due to incomplete transfer. [ 926.869081] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 926.871244] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-694189e5-ae2e-4612-ab4a-15547bf587a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.878187] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e31e29b-bbe7-492a-a76d-d9373cbee831 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.895428] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d180898-6f03-4c35-bfc7-f47c465309e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.898763] env[68638]: DEBUG oslo_vmware.rw_handles [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52a098ad-b974-bdd7-f660-074e09d7b808/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 926.899013] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Uploaded image 54968b0f-571c-4b4c-be55-3b4b458fd6b8 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 926.901460] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 926.901739] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-91784cf0-1a97-420d-8b46-00652ba9c1e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.948178] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 926.948178] env[68638]: value = "task-2833897" [ 926.948178] env[68638]: _type = "Task" [ 926.948178] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.948928] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c7c034-35bf-455f-8999-a03d2ad31de9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.964264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3adf3c-0a7a-4ed4-b906-66ab67722436 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.968294] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833897, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.979645] env[68638]: DEBUG nova.compute.provider_tree [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.154281] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833894, 'name': ReconfigVM_Task, 'duration_secs': 0.763014} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.154832] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.157661] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1c144-4da1-4842-90c5-a1c37ed5992b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.188324] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa45abbe-856e-4292-9e19-30be0133d695 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.209347] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 927.209347] env[68638]: value = "task-2833898" [ 927.209347] env[68638]: _type = "Task" [ 927.209347] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.215099] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833898, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.226789] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.227088] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.227334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.227554] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.227687] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.232129] env[68638]: INFO nova.compute.manager [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Terminating instance [ 927.245986] env[68638]: INFO nova.compute.manager [-] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Took 1.37 seconds to deallocate network for instance. 
[ 927.318538] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e75733dd-593e-4bd9-99fc-0e7f4dda1f1c tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.279s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.382613] env[68638]: DEBUG nova.compute.manager [req-680c2716-3d12-4e98-9c58-17766e0af324 req-16708354-3fef-4749-b008-5f8d7cc191f1 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Received event network-vif-deleted-7a860c34-618e-494a-9a17-d5a14acf9fb5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 927.382854] env[68638]: INFO nova.compute.manager [req-680c2716-3d12-4e98-9c58-17766e0af324 req-16708354-3fef-4749-b008-5f8d7cc191f1 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Neutron deleted interface 7a860c34-618e-494a-9a17-d5a14acf9fb5; detaching it from the instance and deleting it from the info cache [ 927.383608] env[68638]: DEBUG nova.network.neutron [req-680c2716-3d12-4e98-9c58-17766e0af324 req-16708354-3fef-4749-b008-5f8d7cc191f1 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.462377] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833897, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.482863] env[68638]: DEBUG nova.scheduler.client.report [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.630865] env[68638]: DEBUG nova.network.neutron [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.724021] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833898, 'name': ReconfigVM_Task, 'duration_secs': 0.33619} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.724540] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.724925] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f35312b-27b3-4909-8a7f-85e1243490e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.738201] env[68638]: DEBUG nova.compute.manager [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.738201] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 927.738201] env[68638]: value = "task-2833899" [ 927.738201] env[68638]: _type = "Task" [ 927.738201] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.738201] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0de06db-0050-4075-9369-7d023bfc2ecf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.748483] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.755397] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e62fa7-665a-4811-8e57-044e59923b59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.772196] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.805633] env[68638]: WARNING nova.virt.vmwareapi.driver [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b could not be found. 
[ 927.805888] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.807900] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97961c30-c152-4deb-ba76-cccde7c044b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.817816] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b58e9c-6d25-4db1-ab2a-6e78e115aac6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.863847] env[68638]: WARNING nova.virt.vmwareapi.vmops [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02894a47-59b1-475b-b934-c8d0b6dabc5b could not be found. [ 927.864483] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.864483] env[68638]: INFO nova.compute.manager [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Took 0.13 seconds to destroy the instance on the hypervisor. [ 927.864809] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.865024] env[68638]: DEBUG nova.compute.manager [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 927.865286] env[68638]: DEBUG nova.network.neutron [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.886986] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 927.890863] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28fca60b-1b05-4df3-88af-be1a9de3739c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.904140] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d248b1ce-0a79-4bbf-96d7-77d6f4da5a96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.921568] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.921729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.932745] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 927.933027] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.933188] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Flavor pref 
0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 927.934220] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 927.934458] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 927.934655] env[68638]: DEBUG nova.virt.hardware [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 927.936205] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009496c1-5fed-4b46-9d7d-6d70e8403594 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.959385] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a427ddae-b718-457d-aa7e-4b3b915a626d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.963893] env[68638]: DEBUG nova.compute.manager [req-680c2716-3d12-4e98-9c58-17766e0af324 req-16708354-3fef-4749-b008-5f8d7cc191f1 service nova] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Detach interface failed, port_id=7a860c34-618e-494a-9a17-d5a14acf9fb5, reason: Instance 809416da-af6c-429d-b4b2-5334768aa744 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 927.973770] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833897, 'name': Destroy_Task, 'duration_secs': 1.006853} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.985104] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Destroyed the VM [ 927.985294] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 927.985955] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4cef5dc1-cb75-4cb3-8001-94aa9b01adf9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.992338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.996192] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.066s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.996445] env[68638]: DEBUG nova.objects.instance [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lazy-loading 'resources' on Instance uuid 6200613c-b5de-4774-b0c6-fdb78b4c7267 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.997751] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 927.997751] env[68638]: value = "task-2833900" [ 927.997751] env[68638]: _type = "Task" [ 927.997751] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.007639] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833900, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.026277] env[68638]: INFO nova.scheduler.client.report [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Deleted allocations for instance 92c90438-f7cc-4a48-bfac-f7912709cf88 [ 928.134401] env[68638]: INFO nova.compute.manager [-] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Took 1.38 seconds to deallocate network for instance. [ 928.253238] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833899, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.427108] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 928.514568] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833900, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.545119] env[68638]: DEBUG oslo_concurrency.lockutils [None req-59278d80-4a75-42f0-86a0-410c39adafe9 tempest-ServerMetadataTestJSON-418043587 tempest-ServerMetadataTestJSON-418043587-project-member] Lock "92c90438-f7cc-4a48-bfac-f7912709cf88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.969s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.643845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.756990] env[68638]: DEBUG oslo_vmware.api [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833899, 'name': PowerOnVM_Task, 'duration_secs': 0.63505} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.757288] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.764047] env[68638]: DEBUG nova.compute.manager [None req-29122c16-c12e-4485-9b38-6680b39319a7 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.765239] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844c48a6-3b28-44f7-9f1e-f131145bbaba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.878530] env[68638]: DEBUG nova.compute.manager [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Received event network-vif-plugged-f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 928.879300] env[68638]: DEBUG oslo_concurrency.lockutils [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] Acquiring lock "cd27220d-c706-4450-a01b-c871c608056f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.879571] env[68638]: DEBUG oslo_concurrency.lockutils [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] Lock "cd27220d-c706-4450-a01b-c871c608056f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.880806] env[68638]: DEBUG oslo_concurrency.lockutils [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] Lock "cd27220d-c706-4450-a01b-c871c608056f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.880806] env[68638]: DEBUG nova.compute.manager [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] No waiting events found dispatching network-vif-plugged-f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 928.880806] env[68638]: WARNING nova.compute.manager [req-a48c392e-546e-45ef-bd0c-86b51dcd9a28 req-abe98e05-4e6b-4933-9364-60adf02c339d service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Received unexpected event network-vif-plugged-f21435ee-deb0-43b9-8f82-b88274871ba9 for instance with vm_state building and task_state spawning. 
[ 928.901358] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Successfully updated port: f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.960820] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.991630] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca070fa-e892-4327-b3b1-d055064e8e3f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.003391] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffba76a-0f8a-4572-973f-61f0787cfdb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.013935] env[68638]: DEBUG oslo_vmware.api [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833900, 'name': RemoveSnapshot_Task, 'duration_secs': 0.868305} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.041024] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 929.041318] env[68638]: INFO nova.compute.manager [None req-048e9e89-169f-4940-83c4-17db4074ef6c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Took 16.44 seconds to snapshot the instance on the hypervisor. 
[ 929.045123] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5baccff-c923-4c28-aa08-528141f6c5da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.057460] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a167f47a-a945-48f8-9aee-4131bbc7e474 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.073837] env[68638]: DEBUG nova.compute.provider_tree [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.180476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.180790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.338972] env[68638]: DEBUG nova.network.neutron [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.404723] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.404827] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquired lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.405008] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.426523] env[68638]: DEBUG nova.compute.manager [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Received event network-changed-b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 929.426871] env[68638]: DEBUG nova.compute.manager [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Refreshing instance network info cache due to event network-changed-b541496e-247e-4bbb-bed9-6e9a9aa2a91f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 929.427015] env[68638]: DEBUG oslo_concurrency.lockutils [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] Acquiring lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.427392] env[68638]: DEBUG oslo_concurrency.lockutils [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] Acquired lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.427392] env[68638]: DEBUG nova.network.neutron [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Refreshing network info cache for port b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.577804] env[68638]: DEBUG nova.scheduler.client.report [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.624261] env[68638]: INFO nova.compute.manager [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Unrescuing [ 929.624617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.624776] env[68638]: DEBUG oslo_concurrency.lockutils [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquired lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.624942] env[68638]: DEBUG nova.network.neutron [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 929.684405] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.841342] env[68638]: INFO nova.compute.manager [-] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Took 1.98 seconds to deallocate network for instance. [ 929.966954] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.083242] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.085580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.949s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.085719] env[68638]: DEBUG nova.objects.instance [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lazy-loading 'resources' on Instance uuid 3c3fcbca-2477-4037-a978-4b8e9ed0a690 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.110556] env[68638]: INFO nova.scheduler.client.report [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted allocations for instance 6200613c-b5de-4774-b0c6-fdb78b4c7267 [ 930.210286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.212107] env[68638]: DEBUG nova.network.neutron [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Updating instance_info_cache with network_info: [{"id": "f21435ee-deb0-43b9-8f82-b88274871ba9", "address": "fa:16:3e:84:53:3a", "network": {"id": "febc12c5-c099-418f-89c0-bb636ef23e4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2054153475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6565edffde9f43819387f38c2c375823", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf21435ee-de", "ovs_interfaceid": "f21435ee-deb0-43b9-8f82-b88274871ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.391890] env[68638]: INFO nova.compute.manager [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Took 0.55 seconds to detach 1 volumes for instance. [ 930.395585] env[68638]: DEBUG nova.compute.manager [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Deleting volume: 4dc574c0-0283-4f21-ac01-f714b10306da {{(pid=68638) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 930.619149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-510f6123-481d-49a9-9cb2-21260f93af44 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "6200613c-b5de-4774-b0c6-fdb78b4c7267" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.160s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.714743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Releasing lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.715082] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Instance network_info: |[{"id": "f21435ee-deb0-43b9-8f82-b88274871ba9", "address": "fa:16:3e:84:53:3a", "network": {"id": "febc12c5-c099-418f-89c0-bb636ef23e4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2054153475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6565edffde9f43819387f38c2c375823", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf21435ee-de", "ovs_interfaceid": "f21435ee-deb0-43b9-8f82-b88274871ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 930.716775] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:53:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e59b364d-b7f6-499d-b7dc-82b8a819aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f21435ee-deb0-43b9-8f82-b88274871ba9', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.723771] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Creating folder: Project (6565edffde9f43819387f38c2c375823). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.724090] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30ed0969-85ee-44dc-95ba-a1c886a4f352 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.740521] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Created folder: Project (6565edffde9f43819387f38c2c375823) in parent group-v569734. [ 930.740765] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Creating folder: Instances. Parent ref: group-v569950. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 930.741818] env[68638]: DEBUG nova.network.neutron [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [{"id": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "address": "fa:16:3e:62:b5:b8", "network": {"id": "2169592a-fe21-46a9-8c91-c7e04f04504e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1092388301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "46bace7ece424608bf9f88293ba6364c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab92a49b-2f", "ovs_interfaceid": "ab92a49b-2fbf-4108-96cb-3a64ba792c4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.743267] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e28453f-21f2-413e-8875-d59abd16b0e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.761782] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Created folder: Instances in parent group-v569950. [ 930.761889] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 930.761985] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.762263] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd020c2a-78e8-4e5e-8f84-390846978c56 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.780538] env[68638]: DEBUG nova.network.neutron [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updated VIF entry in instance network info cache for port b541496e-247e-4bbb-bed9-6e9a9aa2a91f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.780885] env[68638]: DEBUG nova.network.neutron [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updating instance_info_cache with network_info: [{"id": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "address": "fa:16:3e:76:5e:4b", "network": {"id": "ff60caea-8ade-4d28-940c-4b1787dfff4b", "bridge": "br-int", "label": "tempest-ServersTestJSON-1741337158-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0e49266268a4fda9ac23822bb1436a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7b4bfde-f109-4f64-adab-e7f06b80685d", "external-id": "nsx-vlan-transportzone-910", "segmentation_id": 910, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb541496e-24", "ovs_interfaceid": "b541496e-247e-4bbb-bed9-6e9a9aa2a91f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.790742] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.790742] env[68638]: value = "task-2833904" [ 930.790742] env[68638]: _type = "Task" [ 930.790742] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.809765] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833904, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.958910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.002465] env[68638]: DEBUG nova.compute.manager [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Received event network-changed-f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 931.002983] env[68638]: DEBUG nova.compute.manager [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Refreshing instance network info cache due to event network-changed-f21435ee-deb0-43b9-8f82-b88274871ba9. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 931.002983] env[68638]: DEBUG oslo_concurrency.lockutils [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] Acquiring lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.003157] env[68638]: DEBUG oslo_concurrency.lockutils [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] Acquired lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.003281] env[68638]: DEBUG nova.network.neutron [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Refreshing network info cache for port f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.101576] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03230c64-e8e2-41fb-8a8c-9affdd1a5be2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.111187] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5844af1-9c7b-4cb0-aa3a-91a3c92b2db0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.144734] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e47a8c-7223-4669-9cc5-5e7eea0d312b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.153363] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41363e57-3e30-4843-a8fa-b7bb1592dd54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.171372] env[68638]: DEBUG nova.compute.provider_tree [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.246500] env[68638]: DEBUG oslo_concurrency.lockutils [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Releasing lock "refresh_cache-9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.247252] env[68638]: DEBUG nova.objects.instance [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lazy-loading 'flavor' on Instance uuid 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.284172] env[68638]: DEBUG oslo_concurrency.lockutils [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] Releasing lock "refresh_cache-30193a76-a391-4a64-98cc-7e22dcf7218c" {{(pid=68638) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.284172] env[68638]: DEBUG nova.compute.manager [req-4c8837a4-1ab7-43aa-80cb-cb975365772a req-3c91aef6-8375-4537-b41a-341b5302afc5 service nova] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Received event network-vif-deleted-c999665e-f15e-46cf-9d3c-b7252ab6a96a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 931.301731] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833904, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.679532] env[68638]: DEBUG nova.scheduler.client.report [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 931.755566] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5adc8d1-7b78-4677-96fc-e35f11e6ad99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.787703] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.788108] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62cb0306-21a0-47a1-b674-2355be7f0427 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.797375] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 931.797375] env[68638]: value = "task-2833905" [ 931.797375] env[68638]: _type = "Task" [ 931.797375] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.804974] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833904, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.810527] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833905, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.815116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.815828] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.816035] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.816233] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.816406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.818675] env[68638]: INFO nova.compute.manager [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Terminating instance [ 931.820876] env[68638]: DEBUG nova.network.neutron [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Updated VIF entry in instance network info cache for port f21435ee-deb0-43b9-8f82-b88274871ba9. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.821251] env[68638]: DEBUG nova.network.neutron [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Updating instance_info_cache with network_info: [{"id": "f21435ee-deb0-43b9-8f82-b88274871ba9", "address": "fa:16:3e:84:53:3a", "network": {"id": "febc12c5-c099-418f-89c0-bb636ef23e4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2054153475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6565edffde9f43819387f38c2c375823", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e59b364d-b7f6-499d-b7dc-82b8a819aa12", "external-id": "nsx-vlan-transportzone-731", "segmentation_id": 731, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf21435ee-de", "ovs_interfaceid": "f21435ee-deb0-43b9-8f82-b88274871ba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.185414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.188131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.556s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.188249] env[68638]: DEBUG nova.objects.instance [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'resources' on Instance uuid 9ba0f737-7947-409c-9163-79d621a29285 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.210381] env[68638]: INFO nova.scheduler.client.report [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Deleted allocations for instance 3c3fcbca-2477-4037-a978-4b8e9ed0a690 [ 932.302520] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833904, 'name': CreateVM_Task, 'duration_secs': 1.38322} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.305544] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.306214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.306382] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.306709] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 932.307361] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-423c91ac-5753-472b-8db1-a36199a84b9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.312074] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833905, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.315425] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 932.315425] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522389b2-c912-3aa6-93cd-a875f6f621dc" [ 932.315425] env[68638]: _type = "Task" [ 932.315425] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.323130] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522389b2-c912-3aa6-93cd-a875f6f621dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.324690] env[68638]: DEBUG oslo_concurrency.lockutils [req-a8a38fdc-7790-471c-9367-99150a6f3601 req-f4fb07ae-5d66-4fbf-9100-abf938c2ea7f service nova] Releasing lock "refresh_cache-cd27220d-c706-4450-a01b-c871c608056f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.325395] env[68638]: DEBUG nova.compute.manager [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.325608] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.326396] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785b0a72-0343-43f9-b352-2ca74951ba14 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.333745] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.333974] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1c42fea-1e0a-4bf8-a9af-d06ae1952639 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.340822] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 932.340822] env[68638]: value = "task-2833906" [ 932.340822] env[68638]: _type = "Task" [ 932.340822] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.349493] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833906, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.718205] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7c8fcc19-a187-4748-a9f5-27c336ccf203 tempest-ImagesOneServerNegativeTestJSON-1105710103 tempest-ImagesOneServerNegativeTestJSON-1105710103-project-member] Lock "3c3fcbca-2477-4037-a978-4b8e9ed0a690" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.081s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.813913] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833905, 'name': PowerOffVM_Task, 'duration_secs': 0.55184} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.816455] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.821682] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 932.822942] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d8a0ee4-ed27-489d-b977-55038921d9bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.848531] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522389b2-c912-3aa6-93cd-a875f6f621dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010252} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.850258] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.850488] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.850716] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.850858] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.852017] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.852017] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 932.852017] env[68638]: value = "task-2833907" [ 932.852017] env[68638]: _type = "Task" [ 932.852017] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.854049] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-862df547-fd0e-4aec-814c-2de3db109a3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.861456] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833906, 'name': PowerOffVM_Task, 'duration_secs': 0.257388} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.862483] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.862483] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.862828] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-355dbd64-2818-4e24-add5-4d3ac2b59d4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.869107] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.873071] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.873417] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.874540] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a0def3c-9f7e-4a11-8ab6-d32ac200d350 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.880873] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 932.880873] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eaffc5-7dd6-1335-bc2c-911ac449e820" [ 932.880873] env[68638]: _type = "Task" [ 932.880873] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.892300] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eaffc5-7dd6-1335-bc2c-911ac449e820, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.948813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.949056] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.949235] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore1] bb86aabd-129d-4c14-9db1-6676a5e7b9fa {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.949513] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-641e71c6-2919-45a2-9c3c-56b76f77539d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.957902] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 932.957902] env[68638]: value = "task-2833909" [ 932.957902] env[68638]: _type = "Task" [ 932.957902] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.969862] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833909, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.062101] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec28d4a-e258-48f7-8b89-612d1e093519 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.070093] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd18c229-0339-4459-ac25-06c4cd91b410 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.101766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd34a406-e8f6-4fc1-8576-69c906058241 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.110178] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4af18b-d7f7-42bb-9c52-085a3e6a9f1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.125528] env[68638]: DEBUG nova.compute.provider_tree [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.365580] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833907, 'name': ReconfigVM_Task, 'duration_secs': 0.358551} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.365874] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 933.366073] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.366331] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58c304fe-67ad-4a5b-b719-376a6b6f03ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.374117] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 933.374117] env[68638]: value = "task-2833910" [ 933.374117] env[68638]: _type = "Task" [ 933.374117] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.389598] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833910, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.393540] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eaffc5-7dd6-1335-bc2c-911ac449e820, 'name': SearchDatastore_Task, 'duration_secs': 0.011886} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.394428] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a472e84-6d4c-4a09-a860-0c18b57f5c29 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.400684] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 933.400684] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5298fa7e-ed95-3e42-7e8d-ca1a162c4b35" [ 933.400684] env[68638]: _type = "Task" [ 933.400684] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.410658] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5298fa7e-ed95-3e42-7e8d-ca1a162c4b35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.469961] env[68638]: DEBUG oslo_vmware.api [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36781} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.470386] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.470743] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.470865] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.471743] env[68638]: INFO nova.compute.manager [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Took 1.15 seconds to destroy the instance on the hypervisor. [ 933.471743] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.471743] env[68638]: DEBUG nova.compute.manager [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.471743] env[68638]: DEBUG nova.network.neutron [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 933.649011] env[68638]: ERROR nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [req-195edd6c-d582-45d8-b436-1c00508c821c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-195edd6c-d582-45d8-b436-1c00508c821c"}]} [ 933.673731] env[68638]: DEBUG nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 933.694144] env[68638]: DEBUG nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 933.694385] env[68638]: DEBUG nova.compute.provider_tree [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.710196] env[68638]: DEBUG nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc 
tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 933.736666] env[68638]: DEBUG nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 933.839211] env[68638]: DEBUG nova.compute.manager [req-75372799-1011-49a5-af8b-e1a140494711 req-564240d0-c193-4322-abb6-1bdf920cdd35 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Received event network-vif-deleted-ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 933.839543] env[68638]: INFO nova.compute.manager [req-75372799-1011-49a5-af8b-e1a140494711 req-564240d0-c193-4322-abb6-1bdf920cdd35 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Neutron deleted interface ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad; detaching it from the instance and deleting it from the info cache [ 933.839764] env[68638]: DEBUG nova.network.neutron [req-75372799-1011-49a5-af8b-e1a140494711 req-564240d0-c193-4322-abb6-1bdf920cdd35 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.885234] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833910, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.911056] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5298fa7e-ed95-3e42-7e8d-ca1a162c4b35, 'name': SearchDatastore_Task, 'duration_secs': 0.012504} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.911743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.912082] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] cd27220d-c706-4450-a01b-c871c608056f/cd27220d-c706-4450-a01b-c871c608056f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 933.913179] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a273da6f-21f4-412c-b54e-957c9726a34c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.920745] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 933.920745] env[68638]: value = "task-2833911" [ 933.920745] env[68638]: _type = "Task" [ 933.920745] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.930205] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833911, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.147440] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0c43bd-aea7-4067-8058-de875ad54237 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.156890] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca82ec2-b0ae-44c5-9ca2-17ec10a84566 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.197324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc13c3b-f2fd-484b-ad8b-d3d7a32dd4d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.207539] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866de26e-ecc0-458b-bebe-394c3567f406 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.224634] env[68638]: DEBUG nova.compute.provider_tree [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.265047] env[68638]: DEBUG nova.network.neutron [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.343164] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd7e91f6-59e6-450c-a30f-8492ca5f456c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.354569] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337c91d9-0b55-4c5c-adc8-de4ef7860f95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.395912] env[68638]: DEBUG nova.compute.manager [req-75372799-1011-49a5-af8b-e1a140494711 req-564240d0-c193-4322-abb6-1bdf920cdd35 service nova] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Detach interface failed, port_id=ab64b5b6-6ab8-4d2a-ab52-5e5702f19dad, reason: Instance bb86aabd-129d-4c14-9db1-6676a5e7b9fa could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 934.399829] env[68638]: DEBUG oslo_vmware.api [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833910, 'name': PowerOnVM_Task, 'duration_secs': 0.648195} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.400157] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.401750] env[68638]: DEBUG nova.compute.manager [None req-88092859-c434-407e-af06-40df7c00363a tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.401750] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb1de79-886a-4f03-a228-65ef4f0de010 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.437248] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833911, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.623182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.623424] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.761269] env[68638]: DEBUG nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 934.761542] env[68638]: DEBUG nova.compute.provider_tree [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 104 to 105 during operation: update_inventory {{(pid=68638) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 934.761723] env[68638]: DEBUG nova.compute.provider_tree [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.768225] env[68638]: INFO nova.compute.manager [-] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Took 1.30 seconds to deallocate network for instance. [ 934.936615] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546758} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.938307] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] cd27220d-c706-4450-a01b-c871c608056f/cd27220d-c706-4450-a01b-c871c608056f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 934.938966] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 934.939254] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e12e5aac-e205-4fc7-9e02-c765460a9c07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.948190] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 934.948190] env[68638]: value = "task-2833912" [ 934.948190] env[68638]: _type = "Task" [ 934.948190] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.957819] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833912, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.126771] env[68638]: DEBUG nova.compute.utils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 935.267378] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.079s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.270951] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.229s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.271227] env[68638]: DEBUG nova.objects.instance [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lazy-loading 'resources' on Instance uuid fd6d5951-f2a1-422d-b137-4d19759f9060 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.275144] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.291668] env[68638]: INFO nova.scheduler.client.report [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance 9ba0f737-7947-409c-9163-79d621a29285 [ 935.459523] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.216238} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.459876] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.460664] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6345d54d-d12f-4a14-ab70-d3c1315bee42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.484265] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] cd27220d-c706-4450-a01b-c871c608056f/cd27220d-c706-4450-a01b-c871c608056f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.487295] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2bc839f-cbed-41a9-93b6-16268e5ee419 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.506904] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 935.506904] env[68638]: value = "task-2833913" [ 935.506904] env[68638]: _type = "Task" [ 935.506904] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.526101] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833913, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.629926] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.802907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0446f8a2-9794-4188-8821-0358e37e9fdc tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "9ba0f737-7947-409c-9163-79d621a29285" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 28.671s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.859498] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.859756] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.859969] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.860226] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.860413] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.863163] env[68638]: INFO nova.compute.manager [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 
9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Terminating instance [ 936.019656] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833913, 'name': ReconfigVM_Task, 'duration_secs': 0.314801} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.019977] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Reconfigured VM instance instance-0000004f to attach disk [datastore2] cd27220d-c706-4450-a01b-c871c608056f/cd27220d-c706-4450-a01b-c871c608056f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.021317] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-516e73d6-0cde-4224-9422-23a8cc8110ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.029435] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 936.029435] env[68638]: value = "task-2833914" [ 936.029435] env[68638]: _type = "Task" [ 936.029435] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.039895] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833914, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.127644] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528b4257-664d-4b5e-8601-c7304f7294fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.136867] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01558e1-9519-49f4-af20-c69df3e533ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.174029] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bc64d5-ff82-49ec-8462-cce7c1c384fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.182522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c8b8b0-180b-49b6-9477-1b6c269eb90c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.197232] env[68638]: DEBUG nova.compute.provider_tree [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.367305] env[68638]: DEBUG nova.compute.manager [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.367601] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.368852] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbb0760-3c3e-46a0-96c1-1043f9c897f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.378820] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.379155] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-362372d5-d978-4582-99d6-8d9137791109 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.389244] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 936.389244] env[68638]: value = "task-2833915" [ 936.389244] env[68638]: _type = "Task" [ 936.389244] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.398234] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833915, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.541914] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833914, 'name': Rename_Task, 'duration_secs': 0.151198} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.542300] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.542563] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77c05579-8c23-4894-b103-a9a8bc9bd65c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.552198] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 936.552198] env[68638]: value = "task-2833916" [ 936.552198] env[68638]: _type = "Task" [ 936.552198] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.560931] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.716300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.716574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.716867] env[68638]: INFO nova.compute.manager [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Attaching volume 3242773b-24c0-4e87-8db6-f2d6f9823068 to /dev/sdb [ 936.720207] env[68638]: ERROR nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] [req-684ce705-4825-4b38-ad65-dd59d3e60038] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-684ce705-4825-4b38-ad65-dd59d3e60038"}]} [ 936.738872] env[68638]: DEBUG nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 936.758821] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cd8373-ecab-4e08-bf10-799de3d6953e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.764577] env[68638]: DEBUG nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 936.764577] env[68638]: DEBUG nova.compute.provider_tree [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.772133] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10463b5-0e94-4992-b9c0-4d7558870901 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.776659] env[68638]: DEBUG nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 936.787837] env[68638]: DEBUG nova.virt.block_device [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating existing volume attachment record: 10774387-49f8-447f-bc5b-cbf8febce1f3 
{{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 936.796309] env[68638]: DEBUG nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 936.899820] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833915, 'name': PowerOffVM_Task, 'duration_secs': 0.288952} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.900436] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.900621] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.900893] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6630650f-4d43-4103-b944-e7df8ead86e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.961913] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.962170] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.974832] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.975110] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Deleting 
contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.975318] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleting the datastore file [datastore1] 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.975618] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5c5a191-8c18-416f-9ea3-4262b0239441 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.984505] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 936.984505] env[68638]: value = "task-2833919" [ 936.984505] env[68638]: _type = "Task" [ 936.984505] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.995322] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.067664] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833916, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.216700] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c154cf4-a233-4831-9970-3901889ca89b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.226421] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c25456-926f-411c-8dff-8eadd07eb5fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.261876] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f14bfe-2c39-47e0-bbc4-001462312ca6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.270230] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472b43a1-b280-4055-a973-d71db42c4779 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.287780] env[68638]: DEBUG nova.compute.provider_tree [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 937.465664] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.496381] env[68638]: DEBUG oslo_vmware.api [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299861} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.496677] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.496887] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.497302] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.497302] env[68638]: INFO nova.compute.manager [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 937.497559] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.497806] env[68638]: DEBUG nova.compute.manager [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.497908] env[68638]: DEBUG nova.network.neutron [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.568471] env[68638]: DEBUG oslo_vmware.api [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833916, 'name': PowerOnVM_Task, 'duration_secs': 0.611353} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.568471] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.568471] env[68638]: INFO nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Took 9.68 seconds to spawn the instance on the hypervisor. 
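The spawn sequence recorded above for instance cd27220d-c706-4450-a01b-c871c608056f (ReconfigVM_Task to attach the root vmdk, Rename_Task, then PowerOnVM_Task) is oslo.vmware's invoke-then-poll pattern: each "Invoking VirtualMachine.*_Task" call returns a task moref such as task-2833916, and the task is then polled ("progress is 0% ... 89%") until vCenter reports success. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession is passed in as session; the function name and logger are illustrative, not Nova's own code:

import logging

from oslo_vmware import exceptions as vexc

LOG = logging.getLogger(__name__)


def power_on_vm(session, vm_ref):
    """Power on a VM and block until the vCenter task finishes."""
    # Issues the SOAP call that shows up as
    # "Invoking VirtualMachine.PowerOnVM_Task" and returns a task
    # reference (e.g. value = "task-2833916").
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    try:
        # Polls the task state server-side (the "progress is 0% ... 89%"
        # entries) until it reaches 'success', then returns the task info.
        return session.wait_for_task(task_ref)
    except vexc.VMwareDriverException:
        # wait_for_task raises when the task ends in the 'error' state.
        LOG.exception("PowerOnVM_Task failed for %s", vm_ref)
        raise

The ReconfigVM_Task and Rename_Task entries above are driven through the same session helpers by the vmwareapi vm_util and volumeops code paths visible in the log.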
[ 937.568952] env[68638]: DEBUG nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.569451] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8180111a-13a7-4a00-b46b-9331bfd43bb9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.803470] env[68638]: DEBUG nova.compute.manager [req-a61ff7e6-3f54-42b5-87c4-6f34f8e27939 req-3be43baa-1148-4520-9c65-148a3a4c99f0 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Received event network-vif-deleted-ab92a49b-2fbf-4108-96cb-3a64ba792c4b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 937.803734] env[68638]: INFO nova.compute.manager [req-a61ff7e6-3f54-42b5-87c4-6f34f8e27939 req-3be43baa-1148-4520-9c65-148a3a4c99f0 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Neutron deleted interface ab92a49b-2fbf-4108-96cb-3a64ba792c4b; detaching it from the instance and deleting it from the info cache [ 937.803906] env[68638]: DEBUG nova.network.neutron [req-a61ff7e6-3f54-42b5-87c4-6f34f8e27939 req-3be43baa-1148-4520-9c65-148a3a4c99f0 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.824214] env[68638]: DEBUG nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 106 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 937.824719] env[68638]: DEBUG nova.compute.provider_tree [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 106 to 107 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 937.824801] env[68638]: DEBUG nova.compute.provider_tree [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 937.987912] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.090857] env[68638]: INFO nova.compute.manager [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Took 35.37 seconds to build instance. [ 938.257256] env[68638]: DEBUG nova.network.neutron [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.306523] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-688ee1ab-9a8c-4368-9b8a-5f2aa4820e68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.317978] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f9e17e-60f3-4ea9-bebe-3de5def698ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.330046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.059s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.332435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.427s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.332435] env[68638]: DEBUG nova.objects.instance [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lazy-loading 'resources' on Instance uuid 333d88b6-2182-4e9c-9430-058e67921828 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.352897] env[68638]: DEBUG nova.compute.manager [req-a61ff7e6-3f54-42b5-87c4-6f34f8e27939 req-3be43baa-1148-4520-9c65-148a3a4c99f0 service nova] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Detach interface failed, port_id=ab92a49b-2fbf-4108-96cb-3a64ba792c4b, reason: Instance 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 938.354048] env[68638]: INFO nova.scheduler.client.report [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Deleted allocations for instance fd6d5951-f2a1-422d-b137-4d19759f9060 [ 938.597256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-35632d42-449b-4d43-971f-b726c99923ed tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.892s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.597639] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "cd27220d-c706-4450-a01b-c871c608056f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.597777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.597975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "cd27220d-c706-4450-a01b-c871c608056f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.598184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.598346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.600474] env[68638]: INFO nova.compute.manager [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: 
cd27220d-c706-4450-a01b-c871c608056f] Terminating instance [ 938.758893] env[68638]: INFO nova.compute.manager [-] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Took 1.26 seconds to deallocate network for instance. [ 938.860530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b3e38520-a2f6-428c-869c-28833b85af06 tempest-ServersNegativeTestMultiTenantJSON-315914804 tempest-ServersNegativeTestMultiTenantJSON-315914804-project-member] Lock "fd6d5951-f2a1-422d-b137-4d19759f9060" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.268s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.103957] env[68638]: DEBUG nova.compute.manager [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.104226] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.105289] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4980ecb6-7bef-4da6-91b4-37546af3a5f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.116371] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.116636] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-441d31ef-81fb-4702-9b32-8b19ecb6b46f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.125714] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 939.125714] env[68638]: value = "task-2833922" [ 939.125714] env[68638]: _type = "Task" [ 939.125714] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.137486] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833922, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.161581] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be15a572-57dc-4a0d-a5eb-cce541eecdcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.171027] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3814f80-c581-485b-aa26-f8e9f08c95bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.205735] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57f87807-591b-433d-bffd-398499b4845b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.214881] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baeca8c-bf40-4da6-8239-08c277fd5769 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.230136] env[68638]: DEBUG nova.compute.provider_tree [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.267740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.637508] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833922, 'name': PowerOffVM_Task, 'duration_secs': 0.201628} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.637865] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.638116] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.638449] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbb9ad36-2ac7-4cb9-9c84-8dd9479115b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.706678] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.706865] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.707092] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Deleting the datastore file [datastore2] cd27220d-c706-4450-a01b-c871c608056f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.707369] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd03aa04-fb93-4912-a01e-8f71ae29ba5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.714833] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for the task: (returnval){ [ 939.714833] env[68638]: value = "task-2833925" [ 939.714833] env[68638]: _type = "Task" [ 939.714833] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.723181] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.733216] env[68638]: DEBUG nova.scheduler.client.report [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.224666] env[68638]: DEBUG oslo_vmware.api [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Task: {'id': task-2833925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169651} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.224862] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.225065] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.225255] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.225480] env[68638]: INFO nova.compute.manager [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] [instance: cd27220d-c706-4450-a01b-c871c608056f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 940.225782] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.225982] env[68638]: DEBUG nova.compute.manager [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.226094] env[68638]: DEBUG nova.network.neutron [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.238573] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.241667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.894s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.241667] env[68638]: DEBUG nova.objects.instance [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lazy-loading 'resources' on Instance uuid c80895d5-1a59-4779-9da9-9aeec10bc395 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.260959] env[68638]: INFO nova.scheduler.client.report [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Deleted allocations for instance 333d88b6-2182-4e9c-9430-058e67921828 [ 940.681254] env[68638]: DEBUG nova.compute.manager [req-43f091b0-aa47-4ed1-9c23-61584d1030e4 req-353ff52f-5572-4bf0-8bec-5f2e3e197037 service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Received event network-vif-deleted-f21435ee-deb0-43b9-8f82-b88274871ba9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 940.681656] env[68638]: INFO nova.compute.manager [req-43f091b0-aa47-4ed1-9c23-61584d1030e4 req-353ff52f-5572-4bf0-8bec-5f2e3e197037 service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Neutron deleted interface f21435ee-deb0-43b9-8f82-b88274871ba9; detaching it from the instance and deleting it from the info cache [ 940.681833] env[68638]: DEBUG nova.network.neutron [req-43f091b0-aa47-4ed1-9c23-61584d1030e4 req-353ff52f-5572-4bf0-8bec-5f2e3e197037 service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.768734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45ccf8d1-a6c6-4dca-ad05-8095ba6305d0 tempest-ListImageFiltersTestJSON-1865346341 tempest-ListImageFiltersTestJSON-1865346341-project-member] Lock "333d88b6-2182-4e9c-9430-058e67921828" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.316s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.072695] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82425056-e2ca-4fd0-a1bf-9f747f40c5cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.080998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a859146-655d-4186-a061-0079616d90a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.115059] env[68638]: DEBUG nova.network.neutron [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.117222] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38aaedc7-e359-401d-9325-70b8203c4623 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.126444] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cee7525-d966-4a0e-8c99-03ab8543069e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.143195] env[68638]: DEBUG nova.compute.provider_tree [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.184621] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f69a742c-4bcc-40d5-8b84-a1a6f760944f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.195760] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be334bf-dd34-4913-9b83-04ac9ee9f099 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.232325] env[68638]: DEBUG nova.compute.manager [req-43f091b0-aa47-4ed1-9c23-61584d1030e4 req-353ff52f-5572-4bf0-8bec-5f2e3e197037 service nova] [instance: cd27220d-c706-4450-a01b-c871c608056f] Detach interface failed, port_id=f21435ee-deb0-43b9-8f82-b88274871ba9, reason: Instance cd27220d-c706-4450-a01b-c871c608056f could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 941.617464] env[68638]: INFO nova.compute.manager [-] [instance: cd27220d-c706-4450-a01b-c871c608056f] Took 1.39 seconds to deallocate network for instance. 
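Earlier in this section, the inventory update for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff failed with a 409 "placement.concurrent_update", after which the report client re-read the inventories and the provider generation moved from 106 to 107. Placement uses the resource provider generation as an optimistic-concurrency token: every write must carry the generation the client last saw, and a mismatch is rejected so the client can refresh and retry. A minimal sketch of that retry loop against the Placement REST API, using plain requests rather than Nova's SchedulerReportClient; the endpoint URL and token below are placeholders:

import requests

PLACEMENT_URL = 'http://placement.example.test/placement'  # placeholder
HEADERS = {'X-Auth-Token': 'TOKEN'}                         # placeholder


def set_inventories(rp_uuid, inventories, retries=3):
    """PUT inventories for a resource provider, retrying on generation conflicts."""
    url = f'{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories'
    for _ in range(retries):
        # Re-read the current inventories and, crucially, the provider
        # generation that any write must echo back.
        current = requests.get(url, headers=HEADERS).json()
        body = {
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 with code "placement.concurrent_update": another writer bumped
        # the generation first (106 -> 107 in the log), so refresh and retry.
    raise RuntimeError('gave up after repeated generation conflicts')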
[ 941.646810] env[68638]: DEBUG nova.scheduler.client.report [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.843162] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 941.843402] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 941.844271] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0871d733-d38f-4c3b-a2eb-420162eaf9d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.870988] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8864547-e975-43ab-a764-4d5e151296f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.896347] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.896603] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-629f08d8-e8d2-4210-9a4a-13199c662e64 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.917302] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 
tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 941.917302] env[68638]: value = "task-2833926" [ 941.917302] env[68638]: _type = "Task" [ 941.917302] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.926647] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.124576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.152999] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.155444] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.125s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.155722] env[68638]: DEBUG nova.objects.instance [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid d2d30008-5058-4be3-b803-00d8ca4450d5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.184549] env[68638]: INFO nova.scheduler.client.report [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Deleted allocations for instance c80895d5-1a59-4779-9da9-9aeec10bc395 [ 942.434098] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833926, 'name': ReconfigVM_Task, 'duration_secs': 0.425719} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.434098] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to attach disk [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.435769] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fb789ea-8792-4c6e-9863-8c402d02092f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.461729] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 942.461729] env[68638]: value = "task-2833927" [ 942.461729] env[68638]: _type = "Task" [ 942.461729] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.477077] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833927, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.695377] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f509a507-9dd9-4cc7-92b9-23889677fb34 tempest-VolumesAdminNegativeTest-1989685557 tempest-VolumesAdminNegativeTest-1989685557-project-member] Lock "c80895d5-1a59-4779-9da9-9aeec10bc395" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.265s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.977010] env[68638]: DEBUG oslo_vmware.api [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833927, 'name': ReconfigVM_Task, 'duration_secs': 0.194195} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.977356] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 943.075379] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deac646c-72b1-4650-87e5-f819061dcef6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.084277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1809c1be-9a54-4c18-9a67-cd4f80e12eb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.123112] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1aa7b89-47f6-4eff-b4a9-624b192dbc87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.134760] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595eed2a-8ca7-41c4-bcad-f8e0c56c4d13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.149061] env[68638]: DEBUG nova.compute.provider_tree [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.653147] env[68638]: DEBUG nova.scheduler.client.report [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.036048] env[68638]: DEBUG nova.objects.instance [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'flavor' on Instance uuid 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.158445] env[68638]: 
DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.162437] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.427s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.195138] env[68638]: INFO nova.scheduler.client.report [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance d2d30008-5058-4be3-b803-00d8ca4450d5 [ 944.543977] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21283774-8500-4096-a15a-a4e858a87502 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.547602] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b76de43-61c4-4a53-8032-4bc84ab642c3 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.831s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.558671] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fe44bb-6162-44cf-85ec-1be40a149522 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.597274] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b03d966-ce42-40f5-84dc-96ecdcfcb68b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.609179] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77fea41d-72ad-4ed2-ba96-9a5d2c865bcc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.627606] env[68638]: DEBUG nova.compute.provider_tree [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.707999] env[68638]: DEBUG oslo_concurrency.lockutils [None req-683a3190-aef1-49b5-b816-9e7456f920fe tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "d2d30008-5058-4be3-b803-00d8ca4450d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.108s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.131580] env[68638]: DEBUG nova.scheduler.client.report [None 
req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.604656] env[68638]: INFO nova.compute.manager [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Rebuilding instance [ 945.638922] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.478s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.639573] env[68638]: INFO nova.compute.manager [None req-b15cb600-008e-4645-a915-7f7415667845 tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Successfully reverted task state from rebuilding on failure for instance. [ 945.646162] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.876s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.646408] env[68638]: DEBUG nova.objects.instance [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lazy-loading 'resources' on Instance uuid 7617a7b1-3b21-4d38-b090-1d35bc74637b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.677245] env[68638]: DEBUG nova.compute.manager [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.677943] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ebe560-7c94-4e13-b28c-881f939c1ab7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.341257] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.343114] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.355727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.355727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.355727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.355727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.355727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.356912] env[68638]: INFO nova.compute.manager [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Terminating instance [ 946.372781] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock 
"32d43fce-837d-41d9-be11-a0c3cdb1694b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.373048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.413263] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.414059] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.597882] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8467654-dc90-4613-a8fd-c7fa31f6137e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.605069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.605339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.609968] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8a06c1-4f08-4aa7-8caf-b4390a42d133 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.647157] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0da4f0-9db9-4d8b-b4b5-bf28c718c109 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.659593] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ead6c979-3f64-407e-85e0-a2201fb68b5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.679414] env[68638]: DEBUG nova.compute.provider_tree [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.701619] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.701907] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08bde269-2f22-4552-9b5f-45e7aec6d40f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.710841] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 946.710841] env[68638]: value = "task-2833928" [ 946.710841] env[68638]: _type = "Task" [ 946.710841] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.720352] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833928, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.844993] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 946.862933] env[68638]: DEBUG nova.compute.manager [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 946.862933] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 946.863930] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1195d2f4-056c-4f73-afc3-4d2452549e3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.874745] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.875234] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 946.877850] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fe63f8b-0433-4cb1-aca3-3407f0e82415 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.886559] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 946.886559] env[68638]: value = "task-2833929" [ 946.886559] env[68638]: _type = "Task" [ 946.886559] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.895922] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833929, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.915683] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.108532] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.182985] env[68638]: DEBUG nova.scheduler.client.report [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.222803] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833928, 'name': PowerOffVM_Task, 'duration_secs': 0.377057} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.223220] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.289134] env[68638]: INFO nova.compute.manager [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Detaching volume 3242773b-24c0-4e87-8db6-f2d6f9823068 [ 947.330562] env[68638]: INFO nova.virt.block_device [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Attempting to driver detach volume 3242773b-24c0-4e87-8db6-f2d6f9823068 from mountpoint /dev/sdb [ 947.330562] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 947.330562] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 947.330771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea07219-1b6c-47a4-a2a7-c5d9aaaa08a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.356779] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91745bf-5c22-4166-acfe-c0ffc1a74856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.367555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4138111-bbf9-49fc-a53c-42abbbe76ca9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.394199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.397972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38d02dc-299f-4089-8d56-139192706ea3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.408906] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833929, 'name': PowerOffVM_Task, 'duration_secs': 0.291518} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.420323] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 947.420604] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.423087] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] The volume has not been displaced from its original location: [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 947.428791] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 947.431887] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.432171] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6024cec2-19e9-447a-aacc-a7e9e97d8b7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.433728] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5615382-1bc9-44c0-bb12-bc4e38d90ad0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.453982] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 947.453982] env[68638]: value = "task-2833931" [ 947.453982] env[68638]: _type = "Task" [ 947.453982] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.459742] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.463132] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833931, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.519412] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 947.519687] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 947.519857] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleting the datastore file [datastore1] 27ff37a6-de93-4a4b-904f-a91fdb8b0aff {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.520159] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a705ece9-2ff7-4316-b0c2-bb2e6e33ce80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.527635] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for the task: (returnval){ [ 947.527635] env[68638]: value = "task-2833932" [ 947.527635] env[68638]: _type = "Task" [ 947.527635] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.535933] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.636024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.690645] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.692316] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.049s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.692553] env[68638]: DEBUG nova.objects.instance [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lazy-loading 'resources' on Instance uuid 809416da-af6c-429d-b4b2-5334768aa744 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 947.710796] env[68638]: INFO nova.scheduler.client.report [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Deleted allocations for instance 7617a7b1-3b21-4d38-b090-1d35bc74637b [ 947.968034] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833931, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.042239] env[68638]: DEBUG oslo_vmware.api [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Task: {'id': task-2833932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26349} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.042477] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.042746] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.043057] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.043355] env[68638]: INFO nova.compute.manager [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Took 1.18 seconds to destroy the instance on the hypervisor. [ 948.043723] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 948.044018] env[68638]: DEBUG nova.compute.manager [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 948.044184] env[68638]: DEBUG nova.network.neutron [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.224225] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4f8460cd-9651-4e10-885a-a52ad7d38595 tempest-MigrationsAdminTest-1307513966 tempest-MigrationsAdminTest-1307513966-project-member] Lock "7617a7b1-3b21-4d38-b090-1d35bc74637b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.046s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.469073] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833931, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.616683] env[68638]: DEBUG nova.compute.manager [req-a9a3134b-de69-40fc-a552-1cf93ded1a30 req-8f2ec519-b5ab-463a-9e44-60e7c8475534 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Received event network-vif-deleted-0102f455-ad74-4bf4-a0b8-8a2ec1d59514 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 948.616683] env[68638]: INFO nova.compute.manager [req-a9a3134b-de69-40fc-a552-1cf93ded1a30 req-8f2ec519-b5ab-463a-9e44-60e7c8475534 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Neutron deleted interface 0102f455-ad74-4bf4-a0b8-8a2ec1d59514; detaching it from the instance and deleting it from the info cache [ 948.617149] env[68638]: DEBUG nova.network.neutron [req-a9a3134b-de69-40fc-a552-1cf93ded1a30 req-8f2ec519-b5ab-463a-9e44-60e7c8475534 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.619645] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a345c2b-fb48-4957-a88c-4c3c4e6c5135 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.630016] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591bb84d-9986-4e23-9da5-73d63c7dc5f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.673097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410956c4-2015-4fbd-9c28-a5b93eba96b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.682130] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac904cc-ac3b-4e30-b69a-ebc2f02a9ae1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.700478] env[68638]: DEBUG nova.compute.provider_tree [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.968211] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833931, 'name': ReconfigVM_Task, 'duration_secs': 1.257227} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.968542] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 948.973786] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-066c8a25-512a-489b-bcf2-17ba23f6b8d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.992952] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 948.992952] env[68638]: value = "task-2833933" [ 948.992952] env[68638]: _type = "Task" [ 948.992952] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.001430] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833933, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.098289] env[68638]: DEBUG nova.network.neutron [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.124230] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfe9911b-b5d9-4aad-80d5-d66365696f56 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.135560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7d7d3f-b49a-452b-a74b-79fa37bfdc37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.173468] env[68638]: DEBUG nova.compute.manager [req-a9a3134b-de69-40fc-a552-1cf93ded1a30 req-8f2ec519-b5ab-463a-9e44-60e7c8475534 service nova] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Detach interface failed, port_id=0102f455-ad74-4bf4-a0b8-8a2ec1d59514, reason: Instance 27ff37a6-de93-4a4b-904f-a91fdb8b0aff could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 949.226513] env[68638]: ERROR nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] [req-cacba15b-e233-4a68-92e6-7ba50ab9b10f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cacba15b-e233-4a68-92e6-7ba50ab9b10f"}]} [ 949.247836] env[68638]: DEBUG nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 949.264221] env[68638]: DEBUG nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 949.264491] env[68638]: DEBUG nova.compute.provider_tree [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.278494] env[68638]: DEBUG nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 949.300111] env[68638]: DEBUG nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 949.505919] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833933, 'name': ReconfigVM_Task, 'duration_secs': 0.149678} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.510081] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 949.600769] env[68638]: INFO nova.compute.manager [-] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Took 1.56 seconds to deallocate network for instance. [ 949.744378] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cfd6b2-72e8-4e23-9593-40e46c5f94ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.753289] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1301e8d-71ae-4774-994a-00b5cbbaa883 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.792390] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e80ba80-5531-4882-aa9e-35c2bc6dd7bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.803489] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947c009e-d347-4e18-9482-a6579b005716 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.818125] env[68638]: DEBUG nova.compute.provider_tree [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 950.113168] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.354899] env[68638]: DEBUG nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 
tempest-SecurityGroupsTestJSON-338703272-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 950.354899] env[68638]: DEBUG nova.compute.provider_tree [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 111 to 112 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 950.354899] env[68638]: DEBUG nova.compute.provider_tree [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 950.509248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "1bc685aa-4e88-402f-b581-d179706b12a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.509248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.509248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.509248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.509248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.511124] env[68638]: INFO nova.compute.manager [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Terminating instance [ 950.568437] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.568784] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcbf0fe1-3397-4f40-b97a-b1ad0702a2c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.578997] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 950.578997] env[68638]: value = "task-2833934" [ 950.578997] env[68638]: _type = "Task" [ 950.578997] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.591442] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 950.593651] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 950.593651] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 950.593651] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19108ec8-0fbd-4c08-9409-9e994b1924d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.614170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea09257e-6234-4cff-85f1-57890547f4db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.623433] env[68638]: WARNING nova.virt.vmwareapi.driver [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 950.624264] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.627109] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6868c271-eb3e-458d-aa3b-762cedefff40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.633019] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.633447] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9efcada9-12a6-4a43-81c0-b16e79f0f901 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.709413] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.709663] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.709918] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.710156] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad5be5ce-c7c9-47e8-8bfd-999cd72ff2c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.718315] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 950.718315] env[68638]: value = "task-2833936" [ 950.718315] env[68638]: _type = "Task" [ 950.718315] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.727413] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.863179] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.169s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.865459] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.905s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.867270] env[68638]: INFO nova.compute.claims [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.908681] env[68638]: INFO nova.scheduler.client.report [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Deleted allocations for instance 809416da-af6c-429d-b4b2-5334768aa744 [ 951.016323] env[68638]: DEBUG nova.compute.manager [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Start destroying the instance on the 
hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 951.016323] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 951.016476] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeda901-edbc-4097-946f-c74f563c773f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.029384] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 951.029384] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d7f6a90-602e-4f0d-aaca-2aa615158160 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.034135] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 951.034135] env[68638]: value = "task-2833937" [ 951.034135] env[68638]: _type = "Task" [ 951.034135] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.044320] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833937, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.235027] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139304} completed successfully. 
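
The repeated "Waiting for the task … progress is 0% … completed successfully" triples are a poll loop around vCenter task objects. A rough, generic sketch of that loop is below; fetch_task_info is a hypothetical callable returning an object with .state and .error, not the oslo.vmware API itself.

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reports success or error, or until timeout."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == "success":
                return info                    # e.g. logged with duration_secs
            if info.state == "error":
                raise RuntimeError("task failed: %s" % info.error)
            time.sleep(poll_interval)          # still queued/running; poll again
        raise TimeoutError("task did not complete within %.0fs" % timeout)
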
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.235027] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.235027] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.235027] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.423203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c4ab31a0-596b-403e-af3b-1f0d0b6503d1 tempest-SecurityGroupsTestJSON-338703272 tempest-SecurityGroupsTestJSON-338703272-project-member] Lock "809416da-af6c-429d-b4b2-5334768aa744" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.347s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.545136] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833937, 'name': PowerOffVM_Task, 'duration_secs': 0.21671} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.545136] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.545136] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.545660] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-055529af-623a-40c6-89a7-e75eca955a7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.616639] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.616899] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.617220] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Deleting the datastore file [datastore1] 1bc685aa-4e88-402f-b581-d179706b12a5 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.617528] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-282eb3a8-7462-4d12-88ce-65a48a9b3469 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.629912] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for the task: (returnval){ [ 951.629912] env[68638]: value = "task-2833939" [ 951.629912] env[68638]: _type = "Task" [ 951.629912] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.637553] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833939, 'name': DeleteDatastoreFile_Task} progress is 0%. 
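
The destroy path traced above always runs the same three steps: power off the VM (tolerating "VM already powered off"), unregister it from vCenter, then delete its directory from the datastore. A compressed sketch of that ordering, with hypothetical vm/datastore helpers rather than the real vmops/ds_util code:

    class AlreadyPoweredOff(Exception):
        """Raised by the hypothetical vm.power_off() when the VM is already off."""

    def destroy_instance(vm, datastore, instance_uuid):
        try:
            vm.power_off()                     # PowerOffVM_Task
        except AlreadyPoweredOff:
            pass                               # treated as a no-op, as in the log
        vm.unregister()                        # UnregisterVM
        datastore.delete_path(instance_uuid)   # DeleteDatastoreFile_Task on [datastore1] <uuid>
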
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.741240] env[68638]: INFO nova.virt.block_device [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Booting with volume 3242773b-24c0-4e87-8db6-f2d6f9823068 at /dev/sdb [ 951.790393] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0cf9c81b-5ca3-444f-8355-185ec70f3358 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.801207] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcf7ecf-9e9c-4db7-956a-d62c8835cf26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.838833] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed411824-0f37-4948-8cc7-dbe6c2992121 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.848755] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eec1fa6-99e5-4a1f-bad2-9ce103e4c47b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.882269] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e5d716-4862-495c-9762-33f5a777da24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.891817] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9c5640-6c47-4cf9-b67c-847ad50485e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.907359] env[68638]: DEBUG nova.virt.block_device [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating existing volume attachment record: b66f104c-1fc4-4348-89f3-db5ec34c3549 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 952.147899] env[68638]: DEBUG oslo_vmware.api [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Task: {'id': task-2833939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142105} completed successfully. 
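
"Booting with volume … at /dev/sdb" comes from Nova picking the next free device name when the request did not specify one; the "Using /dev/sd instead of None" entries elsewhere in this log are the same helper choosing the prefix. A toy version of that selection, not the real get_next_device_name:

    import string

    def next_device_name(used_names, prefix="/dev/sd"):
        """Return the first unused /dev/sdX name, scanning a..z."""
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used_names:
                return candidate
        raise ValueError("no free device names under %s" % prefix)

    # With the root disk on /dev/sda, the attached volume lands on /dev/sdb:
    # next_device_name({"/dev/sda"}) -> "/dev/sdb"
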
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.148200] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 952.148376] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 952.148553] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 952.148723] env[68638]: INFO nova.compute.manager [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 952.148967] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
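
"Waiting for function …_deallocate_network_with_retries to return" is a looping call wrapping the Neutron cleanup so transient failures get retried instead of leaking ports. A plain-Python sketch of that retry wrapper, with made-up parameters; it is not the oslo.service loopingcall API.

    import time

    def call_with_retries(func, max_attempts=3, sleep_between=2.0,
                          retry_on=(ConnectionError,)):
        """Call func(), retrying a few times on the listed exceptions."""
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except retry_on:
                if attempt == max_attempts:
                    raise                      # give up and propagate
                time.sleep(sleep_between)      # wait, then try the cleanup again

    # e.g. call_with_retries(lambda: deallocate_for_instance(instance)),
    # where deallocate_for_instance is whatever actually talks to Neutron.
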
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.149302] env[68638]: DEBUG nova.compute.manager [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 952.149351] env[68638]: DEBUG nova.network.neutron [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 952.319920] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830adbe2-ebfc-4236-a13b-c2797ec1d6ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.328727] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87d3397-38c2-4c6f-b957-7d69334fbbae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.363544] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3e6da7-0edf-4c68-b2b4-223d88817c0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.371992] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f011ed92-3806-4dd6-ac73-b2080b9f636b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.386812] env[68638]: DEBUG nova.compute.provider_tree [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.893161] env[68638]: DEBUG nova.scheduler.client.report [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 952.987864] env[68638]: DEBUG nova.compute.manager [req-d5eb84c1-dae4-4dde-b3a3-d55790cc1d0e req-b2753417-0f60-432d-9b3f-adc5954038b6 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Received event network-vif-deleted-99d48199-ae21-4f20-8c41-f96a59bcf89b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 952.987864] env[68638]: INFO nova.compute.manager [req-d5eb84c1-dae4-4dde-b3a3-d55790cc1d0e req-b2753417-0f60-432d-9b3f-adc5954038b6 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Neutron deleted interface 99d48199-ae21-4f20-8c41-f96a59bcf89b; detaching it from the instance and deleting it from the info cache [ 952.987864] env[68638]: DEBUG nova.network.neutron 
[req-d5eb84c1-dae4-4dde-b3a3-d55790cc1d0e req-b2753417-0f60-432d-9b3f-adc5954038b6 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.402362] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.402362] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.403761] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.194s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.405503] env[68638]: INFO nova.compute.claims [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.466083] env[68638]: DEBUG nova.network.neutron [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.494215] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3168db2c-0e65-4bc9-8d7e-299c63c89b29 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.505503] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fc7e19-1e3b-4ab3-8d77-880c729d2ea2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.545018] env[68638]: DEBUG nova.compute.manager [req-d5eb84c1-dae4-4dde-b3a3-d55790cc1d0e req-b2753417-0f60-432d-9b3f-adc5954038b6 service nova] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Detach interface failed, port_id=99d48199-ae21-4f20-8c41-f96a59bcf89b, reason: Instance 1bc685aa-4e88-402f-b581-d179706b12a5 could not be found. 
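
The "waited 21.905s" / "waited 23.194s" lock messages show how resource-tracker work on a host is serialized: claims and usage updates all take the same named "compute_resources" lock, so under load a request can queue for tens of seconds before it runs. A toy tracker using the real oslo.concurrency decorator; the class and its bookkeeping are invented for illustration.

    from oslo_concurrency import lockutils

    class ToyResourceTracker:
        """Minimal stand-in showing the one-lock-per-host serialization."""

        def __init__(self):
            self.used_vcpus = 0

        @lockutils.synchronized("compute_resources")
        def instance_claim(self, vcpus):
            # Only one thread at a time may mutate the usage totals.
            self.used_vcpus += vcpus
            return self.used_vcpus

        @lockutils.synchronized("compute_resources")
        def update_usage(self, vcpus_delta):
            self.used_vcpus += vcpus_delta
            return self.used_vcpus
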
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 953.915676] env[68638]: DEBUG nova.compute.utils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 953.920633] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 953.920859] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.970575] env[68638]: INFO nova.compute.manager [-] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Took 1.82 seconds to deallocate network for instance. [ 953.977314] env[68638]: DEBUG nova.policy [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5dc492be0cd4ce999d61eb28ac3b2e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee5d59c43e974d04ba56981f2716ff60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.054670] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.054823] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.054978] env[68638]: DEBUG nova.virt.hardware [None 
req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.055183] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.055319] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.055465] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.056964] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.060015] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.060015] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.060015] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.060015] env[68638]: DEBUG nova.virt.hardware [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.060015] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed09f608-283b-4da3-85ea-44795dbf9258 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.068659] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e73f89b-7237-4fe7-902e-7fe009864505 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.083651] 
env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:46:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.092574] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.093044] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.093288] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b992340-132b-47bc-9659-382acfd585bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.122279] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.122279] env[68638]: value = "task-2833940" [ 954.122279] env[68638]: _type = "Task" [ 954.122279] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.132017] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833940, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.390933] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Successfully created port: d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 954.422910] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 954.487367] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.633592] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833940, 'name': CreateVM_Task, 'duration_secs': 0.394439} completed successfully. 
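
The hardware.py lines above walk from the flavor/image constraints (0:0:0 preferences, 65536 maxima) to a single possible topology for one vCPU. A heavily simplified sketch of that enumeration follows: it only factors the vCPU count against per-dimension maxima and ignores preferences, NUMA and image properties.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate (sockets, cores, threads) splits whose product is vcpus."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            remaining = vcpus // sockets
            for cores in range(1, min(remaining, max_cores) + 1):
                if remaining % cores:
                    continue
                threads = remaining // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    # possible_topologies(1) -> [(1, 1, 1)], matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
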
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.633816] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.634546] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.634793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.635191] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.635496] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a813c71-9aa8-4628-a0dd-fe44003ec6b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.647348] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 954.647348] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525a4074-a5a0-b9e6-d349-2c521ff63368" [ 954.647348] env[68638]: _type = "Task" [ 954.647348] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.657932] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525a4074-a5a0-b9e6-d349-2c521ff63368, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.904456] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d68948-a750-46fd-afb3-f4759bc579fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.912848] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8980a5-35d3-4d21-824c-753194defa04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.957120] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c2a771-d31a-4c9a-b3ac-1e243dd35f04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.963992] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a127f-038c-4d7e-a126-04abb2e3a473 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.980819] env[68638]: DEBUG nova.compute.provider_tree [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.165292] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525a4074-a5a0-b9e6-d349-2c521ff63368, 'name': SearchDatastore_Task, 'duration_secs': 0.011985} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.165638] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.165909] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.166167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.166315] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.166493] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.166808] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a586b999-e894-4d71-952e-e38e52f8d78f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.177400] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.177724] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore1] devstack-image-cache_base created. 
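
The Acquiring/Acquired/Releasing lock "[datastore1] devstack-image-cache_base/…" sequence above is a per-image cache guard: the image is looked up in the cache folder while holding the lock, and only fetched when the datastore search comes back empty. A hedged sketch of that pattern using the real lockutils context manager; cache_has and fetch_image are hypothetical callables, not the vmwareapi code.

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, datastore, cache_has, fetch_image):
        """Fetch the image into the datastore cache only if it is not there yet."""
        lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        with lockutils.lock(lock_name):
            if not cache_has(image_id):    # SearchDatastore_Task found nothing
                fetch_image(image_id)      # download/copy into the cache folder
        return image_id
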
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.178354] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a0af250-7683-4041-b926-76734085585b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.186533] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 955.186533] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520fb927-1a4a-3892-a8f1-e3d68913f3b6" [ 955.186533] env[68638]: _type = "Task" [ 955.186533] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.196883] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520fb927-1a4a-3892-a8f1-e3d68913f3b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.455724] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 955.484582] env[68638]: DEBUG nova.scheduler.client.report [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.490560] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} 
[ 955.490839] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.491008] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.491333] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 955.491497] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.491648] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.491853] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.492018] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.492186] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.492349] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.492582] env[68638]: DEBUG nova.virt.hardware [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.493481] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-533e6890-e667-464c-af37-8b75c8c4bccf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.503875] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a2d1aa-91b1-4365-8a41-adb29c6ceae0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.623469] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.623813] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.701227] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520fb927-1a4a-3892-a8f1-e3d68913f3b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009602} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.701227] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cba82fa-ac4f-4bef-bb44-aefb9ca6d1eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.709761] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 955.709761] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e53330-d3fe-6a7f-6581-06a4d042277c" [ 955.709761] env[68638]: _type = "Task" [ 955.709761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.718229] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e53330-d3fe-6a7f-6581-06a4d042277c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.929421] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.929655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.998357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.999224] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 956.002097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.046s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.002097] env[68638]: DEBUG nova.objects.instance [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lazy-loading 'resources' on Instance uuid 02894a47-59b1-475b-b934-c8d0b6dabc5b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.128182] env[68638]: DEBUG nova.compute.utils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 956.136008] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Successfully updated port: d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.220598] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': 
session[5267461d-1849-2a3b-78fe-5543790e1404]52e53330-d3fe-6a7f-6581-06a4d042277c, 'name': SearchDatastore_Task, 'duration_secs': 0.009926} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.220924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.221330] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.221646] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06e9f577-33e7-4db0-b39d-68c20cb663b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.228848] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 956.228848] env[68638]: value = "task-2833941" [ 956.228848] env[68638]: _type = "Task" [ 956.228848] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.237492] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833941, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.284521] env[68638]: DEBUG nova.compute.manager [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.284521] env[68638]: DEBUG oslo_concurrency.lockutils [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.284888] env[68638]: DEBUG oslo_concurrency.lockutils [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.285411] env[68638]: DEBUG oslo_concurrency.lockutils [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.285411] env[68638]: DEBUG nova.compute.manager [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] No waiting events found dispatching network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.285701] env[68638]: WARNING nova.compute.manager [req-5a0dcb6e-8f6c-4518-b20a-e16d55e3ce34 req-53dd4565-364b-4dfb-9a37-c1574094c59f service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received unexpected event network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 for instance with vm_state building and task_state spawning. [ 956.435539] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.506384] env[68638]: DEBUG nova.compute.utils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 956.509055] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 956.509246] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.564763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.564763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.574834] env[68638]: DEBUG nova.policy [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b140aa82f044f108521ab8c0d28c0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3e5757d1f74492481048df4a29032ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 956.630726] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.641875] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.641875] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.641875] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 
0249ffb9-82ed-44db-bb20-e619eaa176dd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.741591] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493173} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.742133] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.742562] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.743728] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-839c9024-45a5-4b60-8fd1-ab666c3e6755 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.755143] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 956.755143] env[68638]: value = "task-2833942" [ 956.755143] env[68638]: _type = "Task" [ 956.755143] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.772159] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833942, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.960785] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.974354] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Successfully created port: 7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.982996] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c13752f-f8df-4cdd-82ec-0093890d4096 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.991157] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e563b494-6565-463e-8856-461821be4bc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.024482] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 957.028484] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd92376-7524-4661-97b9-b55bd2405e98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.040560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fecfe6-875e-4d2a-b051-209d48e8d11d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.055480] env[68638]: DEBUG nova.compute.provider_tree [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.067874] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 957.205055] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 957.268797] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069733} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.270194] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.270344] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c80668-65ee-4e53-b8e4-4a9bb1fa04ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.297137] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.299755] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9dccfb8-cd30-4350-b8db-380c1f05b13b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.322399] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 957.322399] env[68638]: value = "task-2833943" [ 957.322399] env[68638]: _type = "Task" [ 957.322399] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.332168] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833943, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.489524] env[68638]: DEBUG nova.network.neutron [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.558936] env[68638]: DEBUG nova.scheduler.client.report [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.596927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.699217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.699637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.699738] env[68638]: INFO nova.compute.manager [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Attaching volume eb98862f-bff4-43a3-b7cb-9025589cf53e to /dev/sdb [ 957.754111] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305e11fc-d3a8-4522-8795-1bde384a1dcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.764026] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751bcc14-17a1-41bf-96d9-a2e2bcce6e3b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.780321] env[68638]: DEBUG nova.virt.block_device [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating existing volume attachment record: 2703e693-b913-4ee1-bc59-92915511b768 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 957.832568] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833943, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.998034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.998492] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance network_info: |[{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.998928] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:9f:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0023f1c-323c-4f1c-a82c-45ad56565341', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.006710] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating folder: Project (ee5d59c43e974d04ba56981f2716ff60). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.006985] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05304ce9-c4f4-4a97-b528-3a0e7cf269eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.019273] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created folder: Project (ee5d59c43e974d04ba56981f2716ff60) in parent group-v569734. [ 958.019532] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating folder: Instances. Parent ref: group-v569956. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.020094] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53e45a34-9edb-4ac2-99c3-86a499d4d4cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.031084] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created folder: Instances in parent group-v569956. [ 958.031376] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.031872] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.031872] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cd1959a-4920-4af9-8ca9-0fd2b2b466da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.047774] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 958.055634] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.055634] env[68638]: value = "task-2833947" [ 958.055634] env[68638]: _type = "Task" [ 958.055634] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.064675] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.066761] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833947, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.067217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.792s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.067884] env[68638]: DEBUG nova.objects.instance [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lazy-loading 'resources' on Instance uuid bb86aabd-129d-4c14-9db1-6676a5e7b9fa {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.083874] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='066fd47e5d4ef4ed3eb0c66f376f4b5e',container_format='bare',created_at=2025-03-07T02:33:42Z,direct_url=,disk_format='vmdk',id=54968b0f-571c-4b4c-be55-3b4b458fd6b8,min_disk=1,min_ram=0,name='tempest-test-snap-1828216680',owner='d3e5757d1f74492481048df4a29032ca',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-07T02:33:58Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.084158] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 
tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.088040] env[68638]: DEBUG nova.virt.hardware [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.088040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46f90fa-e120-43aa-9459-adf9a1cc2856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.099780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794e8b81-eb7c-4e11-97c7-26f256c428ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.142860] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "da886efd-bca9-45aa-abcc-13832c66a90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.143429] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.315104] env[68638]: DEBUG nova.compute.manager [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 958.315104] env[68638]: DEBUG nova.compute.manager [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 
req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing instance network info cache due to event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 958.315104] env[68638]: DEBUG oslo_concurrency.lockutils [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.315104] env[68638]: DEBUG oslo_concurrency.lockutils [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.315104] env[68638]: DEBUG nova.network.neutron [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.333811] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833943, 'name': ReconfigVM_Task, 'duration_secs': 0.532021} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.334138] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b/14c1dba5-98cb-4ebd-8e76-60b3f74cca4b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.335687] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'disk_bus': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'encryption_options': None, 'encrypted': False, 'guest_format': None, 'boot_index': 0, 'device_name': '/dev/sda', 'size': 0, 'image_id': 'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'attachment_id': 'b66f104c-1fc4-4348-89f3-db5ec34c3549', 'device_type': None, 'disk_bus': None, 'delete_on_termination': False, 'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': 
'3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'}, 'volume_type': None}], 'swap': None} {{(pid=68638) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 958.335932] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 958.336169] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 958.336950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e98cc3a-c600-44ba-9097-4b42234092ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.354844] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa62df78-2dc7-4e8d-9684-dc1eb8772410 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.384195] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.384574] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc9f8070-ec97-4c45-8ca3-75a2ade552be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.409695] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 958.409695] env[68638]: value = "task-2833950" [ 958.409695] env[68638]: _type = "Task" [ 958.409695] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.420872] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.567274] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833947, 'name': CreateVM_Task, 'duration_secs': 0.4858} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.567464] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.568250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.568414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.568803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.569087] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a86e7fd3-96e2-4c19-b554-dac712347e73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.583734] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 958.583734] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d4b04-4dac-ef79-7af6-d2eaf5c58198" [ 958.583734] env[68638]: _type = "Task" [ 958.583734] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.584360] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cb65ada5-a2eb-4f0a-a5bc-85a2e0c834cc tempest-ServerActionsV293TestJSON-2012944530 tempest-ServerActionsV293TestJSON-2012944530-project-member] Lock "02894a47-59b1-475b-b934-c8d0b6dabc5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.357s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.594959] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529d4b04-4dac-ef79-7af6-d2eaf5c58198, 'name': SearchDatastore_Task, 'duration_secs': 0.010613} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.595600] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.595913] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.596178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.596334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.596514] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.596787] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb56e09b-bc00-475a-a2cd-9dfdf810fe76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.607194] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.607385] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.611214] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f19913bb-f63d-4b84-951e-4cce98d654d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.617785] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 958.617785] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522dc855-8130-4a00-da51-599808f6a1f7" [ 958.617785] env[68638]: _type = "Task" [ 958.617785] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.626243] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522dc855-8130-4a00-da51-599808f6a1f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.649023] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 958.847519] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Successfully updated port: 7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.924301] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833950, 'name': ReconfigVM_Task, 'duration_secs': 0.330164} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.925142] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to attach disk [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.939419] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e64a405-15d7-4f25-93c8-86d3df074972 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.965021] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 958.965021] env[68638]: value = "task-2833951" [ 958.965021] env[68638]: _type = "Task" [ 958.965021] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.973823] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.974301] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.974630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.974950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.975250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.980386] env[68638]: INFO nova.compute.manager [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Terminating instance [ 958.986896] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833951, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.047774] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a50802-c470-40b5-a846-04c3b1fb5423 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.056571] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4748a6-bb06-41bd-92ec-3e2cc57be737 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.094391] env[68638]: DEBUG nova.network.neutron [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updated VIF entry in instance network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.094801] env[68638]: DEBUG nova.network.neutron [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.096520] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4161fc7e-96e2-46ac-99cf-e8fb29bf1feb {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.105450] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c588bf6e-4e56-4c1c-8e23-d285c4e25d44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.120449] env[68638]: DEBUG nova.compute.provider_tree [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.131108] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522dc855-8130-4a00-da51-599808f6a1f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011318} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.132326] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57a448c2-e9d3-4539-9cfd-66cab700daf2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.138575] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 959.138575] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e59794-2c89-2177-1de1-9044b804b065" [ 959.138575] env[68638]: _type = "Task" [ 959.138575] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.146811] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e59794-2c89-2177-1de1-9044b804b065, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.165643] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.350568] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.350746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.350910] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.473732] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833951, 'name': ReconfigVM_Task, 'duration_secs': 0.147011} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.474092] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 959.474678] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd5bdce4-cceb-44f1-a0e4-23fe85dd477e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.481381] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 959.481381] env[68638]: value = "task-2833952" [ 959.481381] env[68638]: _type = "Task" [ 959.481381] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.488683] env[68638]: DEBUG nova.compute.manager [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.488975] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.489552] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-794eca5e-ed59-40e1-92e5-fa4ead056c67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.493899] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833952, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.500962] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 959.500962] env[68638]: value = "task-2833953" [ 959.500962] env[68638]: _type = "Task" [ 959.500962] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.509389] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833953, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.600731] env[68638]: DEBUG oslo_concurrency.lockutils [req-18f37f07-343f-4aec-a8b7-1a8cab474e00 req-07392b2e-6ef9-4da8-b339-04b5af157c86 service nova] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.626442] env[68638]: DEBUG nova.scheduler.client.report [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.651641] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e59794-2c89-2177-1de1-9044b804b065, 'name': SearchDatastore_Task, 'duration_secs': 0.009553} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.651906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.652180] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.652435] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d14098aa-3104-48f7-85d1-97016c8b3057 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.660859] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 959.660859] env[68638]: value = "task-2833954" [ 959.660859] env[68638]: _type = "Task" [ 959.660859] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.670882] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.882774] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.992429] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833952, 'name': Rename_Task, 'duration_secs': 0.224884} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.992761] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.993060] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4309de3-e890-45bf-8f43-5d65e29a9dc9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.002331] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 960.002331] env[68638]: value = "task-2833955" [ 960.002331] env[68638]: _type = "Task" [ 960.002331] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.015013] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833953, 'name': PowerOffVM_Task, 'duration_secs': 0.219869} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.019978] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.020288] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 960.020506] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569817', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'name': 'volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '039edcf8-7908-4be4-8bd3-0b55545b6f7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'serial': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 960.020833] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.021743] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1efbde7-4951-451c-b1c2-6364da6f6d3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.044140] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b7eaf6-2eac-4726-8722-8893fee0384a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.053629] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17dc574-ccbc-439e-a266-6ec7ced16bc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.075156] env[68638]: DEBUG nova.network.neutron [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Updating instance_info_cache with network_info: [{"id": "7200b9ae-be3f-4868-8707-b645c20bc18e", "address": "fa:16:3e:fd:43:21", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7200b9ae-be", "ovs_interfaceid": "7200b9ae-be3f-4868-8707-b645c20bc18e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.076961] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f69bfd-9f2d-41bd-9126-d18fad577bb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.098119] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] The volume has not been displaced from its original location: [datastore2] volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0/volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 960.103536] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 960.104592] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd18aa8c-c383-4717-a82f-284227f3c8e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.126233] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 960.126233] env[68638]: value = "task-2833956" [ 960.126233] env[68638]: _type = "Task" [ 960.126233] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.130578] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.063s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.133000] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.145s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.134712] env[68638]: INFO nova.compute.claims [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.145564] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833956, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.158115] env[68638]: INFO nova.scheduler.client.report [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted allocations for instance bb86aabd-129d-4c14-9db1-6676a5e7b9fa [ 960.171143] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833954, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.340502] env[68638]: DEBUG nova.compute.manager [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Received event network-vif-plugged-7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 960.340502] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Acquiring lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.340901] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.340970] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.341131] env[68638]: DEBUG nova.compute.manager [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] No waiting events found dispatching network-vif-plugged-7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.341317] env[68638]: WARNING nova.compute.manager [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Received unexpected event network-vif-plugged-7200b9ae-be3f-4868-8707-b645c20bc18e for instance with vm_state building and task_state spawning. 
[ 960.341544] env[68638]: DEBUG nova.compute.manager [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Received event network-changed-7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 960.341796] env[68638]: DEBUG nova.compute.manager [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Refreshing instance network info cache due to event network-changed-7200b9ae-be3f-4868-8707-b645c20bc18e. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 960.341998] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Acquiring lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.516244] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833955, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.581218] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.581526] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Instance network_info: |[{"id": "7200b9ae-be3f-4868-8707-b645c20bc18e", "address": "fa:16:3e:fd:43:21", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7200b9ae-be", "ovs_interfaceid": "7200b9ae-be3f-4868-8707-b645c20bc18e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 960.581838] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Acquired lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.582029] env[68638]: DEBUG nova.network.neutron [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Refreshing network info cache for port 7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.584069] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:43:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7200b9ae-be3f-4868-8707-b645c20bc18e', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.590970] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.591462] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.592265] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-366d2c68-dc23-41d1-94d0-e172bf5b026f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.614688] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.614688] env[68638]: value = "task-2833958" [ 960.614688] env[68638]: _type = "Task" [ 960.614688] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.623484] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833958, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.636065] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833956, 'name': ReconfigVM_Task, 'duration_secs': 0.181441} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.636065] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 960.643527] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7b2dbbc-2b50-4c62-abe1-10fa851167d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.665182] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 960.665182] env[68638]: value = "task-2833959" [ 960.665182] env[68638]: _type = "Task" [ 960.665182] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.679042] env[68638]: DEBUG oslo_concurrency.lockutils [None req-669b81be-66d2-40d7-99f0-eb61aa774e61 tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "bb86aabd-129d-4c14-9db1-6676a5e7b9fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.862s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.688938] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513567} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.695081] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.695220] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.695562] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833959, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.695938] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4aa8d0f1-dcb7-47a2-b460-d1a8ca1e3749 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.709188] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 960.709188] env[68638]: value = "task-2833960" [ 960.709188] env[68638]: _type = "Task" [ 960.709188] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.723040] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833960, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.015911] env[68638]: DEBUG oslo_vmware.api [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2833955, 'name': PowerOnVM_Task, 'duration_secs': 0.524899} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.016242] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.016419] env[68638]: DEBUG nova.compute.manager [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.017211] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aeff408-05ac-4648-802d-22fef8dde979 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.125016] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833958, 'name': CreateVM_Task, 'duration_secs': 0.353322} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.125720] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.125886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.126069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.126462] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.126730] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e621093a-855c-40d9-8282-d5ee6b386658 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.132334] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 961.132334] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521a9422-3ed0-287e-388c-204f945fcbb3" [ 961.132334] env[68638]: _type = "Task" [ 961.132334] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.141173] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521a9422-3ed0-287e-388c-204f945fcbb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.179114] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833959, 'name': ReconfigVM_Task, 'duration_secs': 0.149335} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.181484] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569817', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'name': 'volume-a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '039edcf8-7908-4be4-8bd3-0b55545b6f7b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0', 'serial': 'a3de97f5-fa56-44b0-81e5-346fb44dddb0'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 961.181763] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.182938] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de595f5a-aec5-427e-8cd4-8505f43b1b32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.189944] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.190197] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74a849dd-054b-4135-a3a7-4764247a090b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.222531] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073816} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.225073] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.226097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416f19ed-0b75-4f0b-b033-3846f94c127e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.248750] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.253632] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf599242-49f8-43dd-847e-b3565640d9b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.278821] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 961.278821] env[68638]: value = "task-2833962" [ 961.278821] env[68638]: _type = "Task" [ 961.278821] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.286288] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.286539] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.286762] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Deleting the datastore file [datastore2] 039edcf8-7908-4be4-8bd3-0b55545b6f7b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.287515] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-005e327b-c924-4ca8-a5ed-86c1722b2865 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.292227] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833962, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.299999] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for the task: (returnval){ [ 961.299999] env[68638]: value = "task-2833963" [ 961.299999] env[68638]: _type = "Task" [ 961.299999] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.309889] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.372228] env[68638]: DEBUG nova.network.neutron [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Updated VIF entry in instance network info cache for port 7200b9ae-be3f-4868-8707-b645c20bc18e. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.372636] env[68638]: DEBUG nova.network.neutron [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Updating instance_info_cache with network_info: [{"id": "7200b9ae-be3f-4868-8707-b645c20bc18e", "address": "fa:16:3e:fd:43:21", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7200b9ae-be", "ovs_interfaceid": "7200b9ae-be3f-4868-8707-b645c20bc18e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.540918] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.547852] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c39463-e84c-4d23-af45-78c9ea3e779f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.556083] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe65c68c-a95f-4e2b-a822-dac4df8cfa76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.585705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.585877] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.586112] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.586307] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.586472] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.588918] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6708534-623f-4fef-886d-736ccba1d6d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.591529] env[68638]: INFO nova.compute.manager [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Terminating instance [ 961.597970] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9eaa20c-9258-4494-b69d-65efa2499b44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.612641] env[68638]: DEBUG nova.compute.provider_tree [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.643018] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.643272] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Processing image 54968b0f-571c-4b4c-be55-3b4b458fd6b8 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.643687] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.643687] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.643819] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.644044] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6272663d-9044-4f6a-acbe-d88d125d6aad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.652278] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.652445] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.653138] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e14bf2e0-18af-41a2-a5c9-67758cd07bb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.658592] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 961.658592] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52998e6a-6e03-b6da-e0cf-33c77f81de1d" [ 961.658592] env[68638]: _type = "Task" [ 961.658592] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.670034] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52998e6a-6e03-b6da-e0cf-33c77f81de1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.792488] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833962, 'name': ReconfigVM_Task, 'duration_secs': 0.283444} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.792777] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.793442] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc67d8af-c52d-4fea-8d59-5647083ed61f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.812953] env[68638]: DEBUG oslo_vmware.api [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Task: {'id': task-2833963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087307} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.814206] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.814405] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.814583] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.814807] env[68638]: INFO nova.compute.manager [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Took 2.33 seconds to destroy the instance on the hypervisor. [ 961.815046] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.815305] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 961.815305] env[68638]: value = "task-2833964" [ 961.815305] env[68638]: _type = "Task" [ 961.815305] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.815496] env[68638]: DEBUG nova.compute.manager [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.815587] env[68638]: DEBUG nova.network.neutron [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.828899] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833964, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.877894] env[68638]: DEBUG oslo_concurrency.lockutils [req-8cc1d937-2dd4-4a81-8a7b-c7923278eafd req-2493b694-b9ec-4999-8a99-cec005e8c26c service nova] Releasing lock "refresh_cache-61b9bce5-6a3e-4149-a759-d08e2e2301ee" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.095366] env[68638]: DEBUG nova.compute.manager [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 962.095592] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.096594] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b64f31-8bef-4f71-aacc-ee8610fd3ba2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.104472] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.104733] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd618048-3731-4b6a-a9e1-c10a41b3013f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.111418] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 962.111418] env[68638]: value = "task-2833965" [ 962.111418] env[68638]: _type = "Task" [ 962.111418] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.115311] env[68638]: DEBUG nova.scheduler.client.report [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.129988] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833965, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.169566] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 962.169833] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Fetch image to [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187/OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 962.170026] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Downloading stream optimized image 54968b0f-571c-4b4c-be55-3b4b458fd6b8 to [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187/OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187.vmdk on the data store datastore1 as vApp {{(pid=68638) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 962.170205] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Downloading image file data 54968b0f-571c-4b4c-be55-3b4b458fd6b8 to the ESX as VM named 'OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187' {{(pid=68638) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 962.213664] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.214599] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.260149] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 962.260149] env[68638]: value = "resgroup-9" [ 962.260149] env[68638]: _type = "ResourcePool" [ 962.260149] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 962.260536] env[68638]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-53df67cc-d629-420a-a73e-5de92b56507c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.286209] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease: (returnval){ [ 962.286209] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eff865-4e64-3fb1-4833-fc4829be5eef" [ 962.286209] env[68638]: _type = "HttpNfcLease" [ 962.286209] env[68638]: } obtained for vApp import into resource pool (val){ [ 962.286209] env[68638]: value = "resgroup-9" [ 962.286209] env[68638]: _type = "ResourcePool" [ 962.286209] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 962.286619] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the lease: (returnval){ [ 962.286619] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eff865-4e64-3fb1-4833-fc4829be5eef" [ 962.286619] env[68638]: _type = "HttpNfcLease" [ 962.286619] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 962.294391] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.294391] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eff865-4e64-3fb1-4833-fc4829be5eef" [ 962.294391] env[68638]: _type = "HttpNfcLease" [ 962.294391] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 962.328772] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833964, 'name': Rename_Task, 'duration_secs': 0.281716} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.329119] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.329405] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42e96d87-3c87-403b-9a21-2397a31c9f63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.339624] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 962.339624] env[68638]: value = "task-2833967" [ 962.339624] env[68638]: _type = "Task" [ 962.339624] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.348219] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.480124] env[68638]: DEBUG nova.compute.manager [req-5aa1a9cd-104a-4368-861a-e2819ae8ec54 req-8036fbd3-78e2-4479-8043-31b0920b873a service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Received event network-vif-deleted-83007aec-935b-4f0b-9797-0a3e4b7435e7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 962.480364] env[68638]: INFO nova.compute.manager [req-5aa1a9cd-104a-4368-861a-e2819ae8ec54 req-8036fbd3-78e2-4479-8043-31b0920b873a service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Neutron deleted interface 83007aec-935b-4f0b-9797-0a3e4b7435e7; detaching it from the instance and deleting it from the info cache [ 962.480578] env[68638]: DEBUG nova.network.neutron [req-5aa1a9cd-104a-4368-861a-e2819ae8ec54 req-8036fbd3-78e2-4479-8043-31b0920b873a service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.620330] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.487s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.620910] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 962.627477] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.362s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.627691] env[68638]: DEBUG nova.objects.instance [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lazy-loading 'resources' on Instance uuid 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.632032] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833965, 'name': PowerOffVM_Task, 'duration_secs': 0.243446} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.632032] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.632032] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.632032] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c002691d-f5c1-40ca-8b40-aa90c96bc5b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.704958] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.707236] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.707236] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore2] 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.707236] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57f53ca5-9e0c-4670-b2f9-5e87ab9c2747 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.714857] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 962.714857] env[68638]: value = "task-2833969" [ 962.714857] env[68638]: _type = "Task" [ 962.714857] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.727119] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.731051] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.731387] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.732056] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.732056] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.732253] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.732340] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.732510] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 962.732733] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.800509] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 962.800509] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eff865-4e64-3fb1-4833-fc4829be5eef" [ 962.800509] env[68638]: _type = "HttpNfcLease" [ 962.800509] env[68638]: } is ready. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 962.801054] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 962.801054] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eff865-4e64-3fb1-4833-fc4829be5eef" [ 962.801054] env[68638]: _type = "HttpNfcLease" [ 962.801054] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 962.802183] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1286ef-5542-44a8-90fe-d274e4b1cfde {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.812880] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 962.812880] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 962.875194] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 962.875485] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569960', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'name': 'volume-eb98862f-bff4-43a3-b7cb-9025589cf53e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '90c192bd-b823-414c-b793-260eacc9904f', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'serial': 'eb98862f-bff4-43a3-b7cb-9025589cf53e'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 962.879609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ff8580-61f4-4110-becd-451cdd3775a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.887624] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ba91eb7a-df01-47a9-a8a0-2ee8787fae22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.927669] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e062c1ec-075f-46ce-8fd6-d24d0e5ebd23 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.930752] env[68638]: DEBUG oslo_vmware.api [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2833967, 'name': PowerOnVM_Task, 'duration_secs': 0.478993} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.931140] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.931342] env[68638]: INFO nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Took 7.48 seconds to spawn the instance on the hypervisor. 
[ 962.931525] env[68638]: DEBUG nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.932818] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c8899a-44d6-49c7-a1a1-95089e9b55ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.956368] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] volume-eb98862f-bff4-43a3-b7cb-9025589cf53e/volume-eb98862f-bff4-43a3-b7cb-9025589cf53e.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.957401] env[68638]: DEBUG nova.network.neutron [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.958697] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9c60f4b-8a41-4eed-9bb3-36c52c84c895 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.984770] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15bdfe82-c0a2-41ae-a4b4-a0b2fcbe8c1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.987194] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 962.987194] env[68638]: value = "task-2833970" [ 962.987194] env[68638]: _type = "Task" [ 962.987194] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.994949] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcae05a2-2300-4330-a4f7-243c23cf5c10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.009238] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833970, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.045070] env[68638]: DEBUG nova.compute.manager [req-5aa1a9cd-104a-4368-861a-e2819ae8ec54 req-8036fbd3-78e2-4479-8043-31b0920b873a service nova] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Detach interface failed, port_id=83007aec-935b-4f0b-9797-0a3e4b7435e7, reason: Instance 039edcf8-7908-4be4-8bd3-0b55545b6f7b could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 963.134100] env[68638]: DEBUG nova.compute.utils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.140341] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 963.141108] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.187852] env[68638]: DEBUG nova.policy [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.229999] env[68638]: DEBUG oslo_vmware.api [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2833969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157439} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.230278] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.230528] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.230749] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.230971] env[68638]: INFO nova.compute.manager [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 963.231240] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.231435] env[68638]: DEBUG nova.compute.manager [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 963.231528] env[68638]: DEBUG nova.network.neutron [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.240024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.475628] env[68638]: INFO nova.compute.manager [-] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Took 1.66 seconds to deallocate network for instance. [ 963.487849] env[68638]: INFO nova.compute.manager [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Took 34.55 seconds to build instance. [ 963.504216] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833970, 'name': ReconfigVM_Task, 'duration_secs': 0.459361} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.504216] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfigured VM instance instance-00000033 to attach disk [datastore1] volume-eb98862f-bff4-43a3-b7cb-9025589cf53e/volume-eb98862f-bff4-43a3-b7cb-9025589cf53e.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.514328] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e9069cd-fad7-4364-983f-776ceb26221d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.533575] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 963.533575] env[68638]: value = "task-2833971" [ 963.533575] env[68638]: _type = "Task" [ 963.533575] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.546737] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.631525] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fb6e74-f8fb-4c2d-b3d9-ae1b2b703a82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.641689] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 963.654346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba3e04e-bd0c-44d1-b114-677b19954e4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.698915] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90294aa-0f90-4e52-8806-ecd0cb832d0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.710020] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-565965a0-6e1a-4030-ae03-97d4cd445767 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.733632] env[68638]: DEBUG nova.compute.provider_tree [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.756673] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 963.756938] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 963.757888] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b186147-7a78-4403-bf94-852c92b2e9cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.765248] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 963.765415] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 963.765653] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3c8fe400-7b9b-4563-a5ec-844b1d400669 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.791923] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Successfully created port: eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.992762] env[68638]: DEBUG oslo_concurrency.lockutils [None req-15b44ab8-8cbb-488c-a030-53d272077850 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.071s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.043815] env[68638]: DEBUG oslo_vmware.api [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833971, 'name': ReconfigVM_Task, 'duration_secs': 0.183464} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.044847] env[68638]: INFO nova.compute.manager [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Took 0.57 seconds to detach 1 volumes for instance. 
[ 964.047233] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569960', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'name': 'volume-eb98862f-bff4-43a3-b7cb-9025589cf53e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '90c192bd-b823-414c-b793-260eacc9904f', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'serial': 'eb98862f-bff4-43a3-b7cb-9025589cf53e'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 964.049053] env[68638]: DEBUG nova.compute.manager [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Deleting volume: a3de97f5-fa56-44b0-81e5-346fb44dddb0 {{(pid=68638) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 964.237575] env[68638]: DEBUG nova.scheduler.client.report [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.241842] env[68638]: DEBUG nova.network.neutron [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.261416] env[68638]: DEBUG oslo_vmware.rw_handles [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265890a-67bc-d6e4-becc-0416af4de916/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 964.263580] env[68638]: INFO nova.virt.vmwareapi.images [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Downloaded image file data 54968b0f-571c-4b4c-be55-3b4b458fd6b8 [ 964.263580] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a59274f-c19b-4871-b0df-1deb7e7fdd83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.280483] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c58ed97-ae71-4851-8d17-78ce2fb8ff52 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.360094] env[68638]: INFO nova.virt.vmwareapi.images [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] The imported VM was unregistered [ 964.362755] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 964.363030] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.363437] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "30193a76-a391-4a64-98cc-7e22dcf7218c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.363708] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.363928] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.364138] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.364310] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.366105] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-becb2bd8-6574-42e1-bdfe-8dec6b59e52f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.368520] env[68638]: INFO nova.compute.manager [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Terminating instance [ 964.385111] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.385111] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187/OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187.vmdk to [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk. {{(pid=68638) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 964.385111] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-73fcf95e-7563-4c4a-9149-0a4c5cebf80d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.392854] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 964.392854] env[68638]: value = "task-2833974" [ 964.392854] env[68638]: _type = "Task" [ 964.392854] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.404735] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.513888] env[68638]: DEBUG nova.compute.manager [req-8ef7fce1-b087-422d-9a9d-6a5cfd625816 req-c6bf6c61-4312-4eaf-a888-562367e42aec service nova] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Received event network-vif-deleted-3a6c2c2c-b195-47d0-a907-17dac5df15df {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 964.593060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.667851] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.697755] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.698189] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.698362] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 964.698634] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.698789] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
964.699118] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.699350] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.699511] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.699688] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.699852] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.700171] env[68638]: DEBUG nova.virt.hardware [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.701328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4968fe6-a033-48d0-a093-6ba9c2eb2544 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.712760] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3e3e1e-ebb6-4438-bcc3-bea88f98a604 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.745055] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.117s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.747705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.623s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.750171] 
env[68638]: DEBUG nova.objects.instance [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lazy-loading 'resources' on Instance uuid cd27220d-c706-4450-a01b-c871c608056f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.750171] env[68638]: INFO nova.compute.manager [-] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Took 1.52 seconds to deallocate network for instance. [ 964.783318] env[68638]: INFO nova.scheduler.client.report [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleted allocations for instance 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9 [ 964.875103] env[68638]: DEBUG nova.compute.manager [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.875404] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.876377] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9bba06-0c04-47b3-aff1-c185a5d56aa9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.886291] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.886679] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73348550-2462-4b8e-9e1b-0b7d771ad09c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.895592] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 964.895592] env[68638]: value = "task-2833975" [ 964.895592] env[68638]: _type = "Task" [ 964.895592] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.918078] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.920257] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833975, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.090150] env[68638]: DEBUG nova.objects.instance [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.258719] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.294411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-025aecda-857a-44f9-aaaa-260a7f69b1f2 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.434s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.411228] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.415611] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833975, 'name': PowerOffVM_Task, 'duration_secs': 0.39126} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.422653] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.422979] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.423609] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07f0e80d-551c-4106-aeb0-5a29a9c9a8c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.517403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.517403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.517403] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Deleting the datastore file [datastore2] 30193a76-a391-4a64-98cc-7e22dcf7218c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.517403] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fe21631-4d8c-4ba7-8197-4b97948906ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.522771] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for the task: (returnval){ [ 965.522771] env[68638]: value = "task-2833977" [ 965.522771] env[68638]: _type = "Task" [ 965.522771] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.537873] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833977, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.575325] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.575630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.575907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.576129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.576308] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.579167] env[68638]: INFO nova.compute.manager [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Terminating instance [ 965.604886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-05d7e731-f2ca-4226-9c7d-a45e9e2a833e tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.905s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.760498] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Successfully updated port: eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.786542] env[68638]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bf7356-2e02-4e12-8dd4-35e839917d01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.796773] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5d8765-8593-41e8-b214-f0458edc2c87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.836589] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43c0f43-3431-4bb8-b6eb-8cc585a42593 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.845278] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6eb5797-a0e8-4d76-ba15-e914be9477f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.860959] env[68638]: DEBUG nova.compute.provider_tree [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.907311] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.037896] env[68638]: DEBUG oslo_vmware.api [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Task: {'id': task-2833977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440837} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.038371] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.038687] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.038996] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.039313] env[68638]: INFO nova.compute.manager [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 966.039717] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.040042] env[68638]: DEBUG nova.compute.manager [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.040190] env[68638]: DEBUG nova.network.neutron [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.085014] env[68638]: DEBUG nova.compute.manager [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 966.085304] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.086664] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83135a41-662b-46f4-84d6-c97ae41c75c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.096353] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.096659] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eac7fdc0-7225-4fd9-80cd-bc907f450c02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.104216] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 966.104216] env[68638]: value = "task-2833978" [ 966.104216] env[68638]: _type = "Task" [ 966.104216] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.113555] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833978, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.219318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.219601] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.219783] env[68638]: DEBUG nova.compute.manager [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.220778] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b901e2-69bb-41f1-bda8-cd209370e246 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.228709] env[68638]: DEBUG nova.compute.manager [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 966.229654] env[68638]: DEBUG nova.objects.instance [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.266241] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.266509] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.266727] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.365474] env[68638]: 
DEBUG nova.scheduler.client.report [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.410050] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.615310] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833978, 'name': PowerOffVM_Task, 'duration_secs': 0.482704} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.615629] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.615921] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 966.616148] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c218561-97d2-4a4e-922d-83480e846f68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.667842] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 966.668209] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing instance network info cache due to event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 966.668404] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.669231] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.669231] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.708467] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.708579] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.708774] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleting the datastore file [datastore2] ee752ace-fa19-4fd7-af89-f6628ce3d087 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.709048] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6fde598-b7c3-498b-9585-f643e4f37bb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.716685] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for the task: (returnval){ [ 966.716685] env[68638]: value = "task-2833980" [ 966.716685] env[68638]: _type = "Task" [ 966.716685] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.725555] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833980, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.798940] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.869930] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.876328] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.482s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.877975] env[68638]: INFO nova.compute.claims [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.899022] env[68638]: INFO nova.scheduler.client.report [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Deleted allocations for instance cd27220d-c706-4450-a01b-c871c608056f [ 966.911295] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.964183] env[68638]: DEBUG nova.network.neutron [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Updating instance_info_cache with network_info: [{"id": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "address": "fa:16:3e:f0:6e:e3", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5d1f40-25", "ovs_interfaceid": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.226546] env[68638]: DEBUG oslo_vmware.api [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Task: {'id': task-2833980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396763} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.226809] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.226997] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.227192] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.227366] env[68638]: INFO nova.compute.manager [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 967.227607] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.227794] env[68638]: DEBUG nova.compute.manager [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 967.227891] env[68638]: DEBUG nova.network.neutron [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 967.229570] env[68638]: DEBUG nova.network.neutron [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.236519] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.236812] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05054e0c-a46e-4ca4-8c2c-b2830785d96b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.244381] env[68638]: DEBUG oslo_vmware.api [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 967.244381] env[68638]: value = "task-2833981" [ 967.244381] env[68638]: _type = "Task" [ 967.244381] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.252996] env[68638]: DEBUG oslo_vmware.api [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833981, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.411399] env[68638]: DEBUG oslo_concurrency.lockutils [None req-42931d6b-b55b-4b7a-ab19-242be9990298 tempest-ServerAddressesNegativeTestJSON-1601178449 tempest-ServerAddressesNegativeTestJSON-1601178449-project-member] Lock "cd27220d-c706-4450-a01b-c871c608056f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.813s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.418223] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833974, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.676338} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.418223] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187/OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187.vmdk to [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk. [ 967.418223] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Cleaning up location [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 967.418223] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d3eafc41-8fc6-42f2-9ef4-7fe97aa2a187 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.418405] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30f5c72e-73e0-48c3-a633-055d1e184ce6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.426972] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 967.426972] env[68638]: value = "task-2833982" [ 967.426972] env[68638]: _type = "Task" [ 967.426972] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.438815] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833982, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.468842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.468842] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Instance network_info: |[{"id": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "address": "fa:16:3e:f0:6e:e3", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5d1f40-25", "ovs_interfaceid": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 967.468842] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:6e:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.477651] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.478219] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.478466] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb1ebcf2-6752-4153-8fe4-61343fc2b8f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.498447] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.498447] env[68638]: value = "task-2833983" [ 967.498447] env[68638]: _type = "Task" [ 967.498447] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.506500] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833983, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.539419] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updated VIF entry in instance network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.539800] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.731852] env[68638]: INFO nova.compute.manager [-] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Took 1.69 seconds to deallocate network for instance. 
[ 967.754283] env[68638]: DEBUG oslo_vmware.api [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833981, 'name': PowerOffVM_Task, 'duration_secs': 0.234817} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.754565] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.754821] env[68638]: DEBUG nova.compute.manager [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.755665] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf77afb-e948-4063-96bb-18c29a199c34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.937617] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06089} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.940889] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.941146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.941467] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk to [datastore1] 61b9bce5-6a3e-4149-a759-d08e2e2301ee/61b9bce5-6a3e-4149-a759-d08e2e2301ee.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.941937] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50751adb-a7a4-4c82-b280-d577df269ed3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.949785] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 967.949785] env[68638]: value = "task-2833984" [ 967.949785] env[68638]: _type = 
"Task" [ 967.949785] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.961373] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.009542] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833983, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.043702] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.044131] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Received event network-vif-plugged-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.044348] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.044561] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.044746] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.044928] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] No waiting events found dispatching network-vif-plugged-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.045124] env[68638]: WARNING nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Received unexpected event network-vif-plugged-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 for instance with vm_state building and task_state spawning. 
[ 968.045301] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Received event network-changed-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.045519] env[68638]: DEBUG nova.compute.manager [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Refreshing instance network info cache due to event network-changed-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 968.045749] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Acquiring lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.045945] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Acquired lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.046151] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Refreshing network info cache for port eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.117385] env[68638]: DEBUG nova.network.neutron [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.241687] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.268612] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44f6e972-9959-4fd0-8b2a-5434f85c2dc8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.280088] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f683aa82-c844-4bcd-b488-8acd7a4678eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.288645] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db1a55e-634a-4ab2-b789-f0410244c92b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.323728] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dffb6f34-d160-4f1f-9f12-c1c7a7613d15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.331676] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dcee1c-0dc8-4ef8-9fab-ab62e60210a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.346247] env[68638]: DEBUG nova.compute.provider_tree [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.466859] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.512031] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833983, 'name': CreateVM_Task, 'duration_secs': 0.78067} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.512031] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.512828] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.513010] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.513359] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 968.513635] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c845943d-9ea3-4da4-b584-969256e4380d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.519744] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 968.519744] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b3e324-232e-54a3-3626-a1960b650e36" [ 968.519744] env[68638]: _type = "Task" [ 968.519744] env[68638]: 
} to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.527504] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b3e324-232e-54a3-3626-a1960b650e36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.622403] env[68638]: INFO nova.compute.manager [-] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Took 1.39 seconds to deallocate network for instance. [ 968.698339] env[68638]: DEBUG nova.objects.instance [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.762132] env[68638]: DEBUG nova.compute.manager [req-10f3f68d-84bf-4d1c-8545-00c6a45aba09 req-5954c8b0-9724-44a3-872c-acfd3a63129b service nova] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Received event network-vif-deleted-b541496e-247e-4bbb-bed9-6e9a9aa2a91f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.762536] env[68638]: DEBUG nova.compute.manager [req-10f3f68d-84bf-4d1c-8545-00c6a45aba09 req-5954c8b0-9724-44a3-872c-acfd3a63129b service nova] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Received event network-vif-deleted-6f9df10a-d887-489b-b7e7-a3305f8c5c9e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.835804] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Updated VIF entry in instance network info cache for port eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 968.836736] env[68638]: DEBUG nova.network.neutron [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Updating instance_info_cache with network_info: [{"id": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "address": "fa:16:3e:f0:6e:e3", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5d1f40-25", "ovs_interfaceid": "eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.849898] env[68638]: DEBUG nova.scheduler.client.report [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.966931] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.030347] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b3e324-232e-54a3-3626-a1960b650e36, 'name': SearchDatastore_Task, 'duration_secs': 0.014594} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.030698] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.030945] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.031202] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.031348] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.031527] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.031807] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b60fbe09-1e3c-4fc2-a38f-9c108c9e718d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.042192] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.042404] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.043190] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c22667c-d81c-4886-b9da-316112f165b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.048733] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 969.048733] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525c842e-1b12-9582-d062-8e2d4bf7bcbb" [ 969.048733] env[68638]: _type = "Task" [ 969.048733] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.057314] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525c842e-1b12-9582-d062-8e2d4bf7bcbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.130263] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.205221] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.205221] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.205221] env[68638]: DEBUG nova.network.neutron [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.205221] env[68638]: DEBUG nova.objects.instance [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'info_cache' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.340807] env[68638]: DEBUG oslo_concurrency.lockutils [req-40204652-ed6c-4d80-809a-b849059a5071 req-1eb5ebe9-8c77-467a-b484-83271f23b733 service nova] Releasing lock "refresh_cache-94af9123-435f-4ae4-8b6d-82838df61d4e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.357476] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.358180] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.361014] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.929s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.363913] env[68638]: INFO nova.compute.claims [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.467485] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.558503] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525c842e-1b12-9582-d062-8e2d4bf7bcbb, 'name': SearchDatastore_Task, 'duration_secs': 0.010649} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.559371] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1ab8377-42c0-49f0-8396-c852e916bcc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.566260] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 969.566260] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e6e239-88fa-3c3a-947a-8373c3c22300" [ 969.566260] env[68638]: _type = "Task" [ 969.566260] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.572952] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e6e239-88fa-3c3a-947a-8373c3c22300, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.708368] env[68638]: DEBUG nova.objects.base [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Object Instance<90c192bd-b823-414c-b793-260eacc9904f> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 969.869354] env[68638]: DEBUG nova.compute.utils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 969.872374] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 969.872539] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.929698] env[68638]: DEBUG nova.policy [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d18c1746cd542918c421149b896d288', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96009a6d8b4546db9775a0fda55674b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 969.967065] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.077454] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e6e239-88fa-3c3a-947a-8373c3c22300, 'name': SearchDatastore_Task, 'duration_secs': 0.012657} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.077716] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.077975] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 94af9123-435f-4ae4-8b6d-82838df61d4e/94af9123-435f-4ae4-8b6d-82838df61d4e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.078445] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dfd3097-2a51-4cdc-b61e-71572eb33b85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.086179] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 970.086179] env[68638]: value = "task-2833985" [ 970.086179] env[68638]: _type = "Task" [ 970.086179] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.095549] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833985, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.318490] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Successfully created port: efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.375190] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.470762] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833984, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.357054} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.471132] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/54968b0f-571c-4b4c-be55-3b4b458fd6b8/54968b0f-571c-4b4c-be55-3b4b458fd6b8.vmdk to [datastore1] 61b9bce5-6a3e-4149-a759-d08e2e2301ee/61b9bce5-6a3e-4149-a759-d08e2e2301ee.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.471927] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887f6834-3678-4330-b9fb-3db800774174 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.501762] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 61b9bce5-6a3e-4149-a759-d08e2e2301ee/61b9bce5-6a3e-4149-a759-d08e2e2301ee.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.508337] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a3b74e6-4365-46a2-8d9d-9d346be88f72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.532987] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 970.532987] env[68638]: value = "task-2833986" [ 970.532987] env[68638]: _type = "Task" [ 970.532987] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.547381] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833986, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.599917] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833985, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.701794] env[68638]: DEBUG nova.network.neutron [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.777500] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc596146-1029-4901-85aa-22b919a4d064 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.790356] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24026aa-d5c7-4458-8370-1688a0d65e13 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.821420] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013eb3d6-2765-45f6-8b13-87b59d76b962 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.830461] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6649862-2793-45e0-83fc-9325cb41a578 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.844662] env[68638]: DEBUG nova.compute.provider_tree [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.043170] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833986, 'name': ReconfigVM_Task, 'duration_secs': 0.37649} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.043413] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 61b9bce5-6a3e-4149-a759-d08e2e2301ee/61b9bce5-6a3e-4149-a759-d08e2e2301ee.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.044131] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcc508bc-83fa-4223-82f7-8986e202b968 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.052156] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 971.052156] env[68638]: value = "task-2833987" [ 971.052156] env[68638]: _type = "Task" [ 971.052156] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.060197] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833987, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.095339] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533957} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.095660] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 94af9123-435f-4ae4-8b6d-82838df61d4e/94af9123-435f-4ae4-8b6d-82838df61d4e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.095949] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.096254] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e873cd1d-5b9d-43df-aaea-9c144c7e8f28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.102318] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 971.102318] env[68638]: value = "task-2833988" [ 971.102318] env[68638]: _type = "Task" [ 971.102318] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.109509] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.204112] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.349821] env[68638]: DEBUG nova.scheduler.client.report [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.387892] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.417053] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.417319] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.417501] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.417700] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.417850] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.418233] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.418500] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.418669] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 971.418854] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.419039] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.419223] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.420167] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d37cab-d8e2-4ca2-9a08-5b656bbf1a81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.428870] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17a344c-b6be-461c-ab91-d63ce5f33ca8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.561534] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833987, 'name': Rename_Task, 'duration_secs': 0.147027} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.561831] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.562069] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-265233ce-4c4f-48a4-943a-b3d7b6a99b55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.568340] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 971.568340] env[68638]: value = "task-2833989" [ 971.568340] env[68638]: _type = "Task" [ 971.568340] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.576942] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833989, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.612272] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068429} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.612618] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.613436] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e93a17-2dfb-45e9-af08-343e1f35f222 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.635615] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 94af9123-435f-4ae4-8b6d-82838df61d4e/94af9123-435f-4ae4-8b6d-82838df61d4e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.636246] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfdea4a9-123b-4e88-9d03-a6c5dfde8a63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.654778] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 971.654778] env[68638]: value = "task-2833990" [ 971.654778] env[68638]: _type = "Task" [ 971.654778] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.662606] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833990, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.855739] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.856435] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 971.859289] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.400s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.860849] env[68638]: INFO nova.compute.claims [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.980779] env[68638]: DEBUG nova.compute.manager [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Received event network-vif-plugged-efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 971.981010] env[68638]: DEBUG oslo_concurrency.lockutils [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] Acquiring lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.981255] env[68638]: DEBUG oslo_concurrency.lockutils [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.981430] env[68638]: DEBUG oslo_concurrency.lockutils [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.981595] env[68638]: DEBUG nova.compute.manager [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] No waiting events found dispatching network-vif-plugged-efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 971.981757] env[68638]: WARNING nova.compute.manager [req-be56c646-e1dc-4a73-a75b-c0513ca9f47b req-61b9853f-06fd-46b6-9141-dad92a7085e9 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Received unexpected event network-vif-plugged-efc7a36d-f958-419e-b67e-5f3724ac0afc for instance with vm_state building and task_state spawning. [ 972.077860] env[68638]: DEBUG oslo_vmware.api [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833989, 'name': PowerOnVM_Task, 'duration_secs': 0.496994} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.078210] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.078343] env[68638]: INFO nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Took 14.03 seconds to spawn the instance on the hypervisor. [ 972.078619] env[68638]: DEBUG nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.079426] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72b1ae3-2925-4173-9d6d-6093c09cc937 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.092462] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Successfully updated port: efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.164190] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833990, 'name': ReconfigVM_Task, 'duration_secs': 0.29919} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.164472] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 94af9123-435f-4ae4-8b6d-82838df61d4e/94af9123-435f-4ae4-8b6d-82838df61d4e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.165177] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72613c74-f2af-4930-9b87-dbbbe60db9f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.171339] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 972.171339] env[68638]: value = "task-2833991" [ 972.171339] env[68638]: _type = "Task" [ 972.171339] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.179463] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833991, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.210611] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.210973] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b578200-818a-429a-9dc8-f8ba0714cb2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.217207] env[68638]: DEBUG oslo_vmware.api [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 972.217207] env[68638]: value = "task-2833992" [ 972.217207] env[68638]: _type = "Task" [ 972.217207] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.224342] env[68638]: DEBUG oslo_vmware.api [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.366276] env[68638]: DEBUG nova.compute.utils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 972.369933] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 972.369933] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.411912] env[68638]: DEBUG nova.policy [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d18c1746cd542918c421149b896d288', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96009a6d8b4546db9775a0fda55674b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 972.595793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.596179] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.596179] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.600207] env[68638]: INFO nova.compute.manager [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Took 42.41 seconds to build instance. [ 972.681628] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833991, 'name': Rename_Task, 'duration_secs': 0.191388} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.682179] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.682179] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-260e86a8-3227-43c6-9cbc-0acb95d93265 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.690422] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 972.690422] env[68638]: value = "task-2833993" [ 972.690422] env[68638]: _type = "Task" [ 972.690422] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.699349] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833993, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.702031] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Successfully created port: 3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 972.727214] env[68638]: DEBUG oslo_vmware.api [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2833992, 'name': PowerOnVM_Task, 'duration_secs': 0.432698} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.727584] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.727805] env[68638]: DEBUG nova.compute.manager [None req-4aa01171-02b7-46a7-a5b9-328a5cebd016 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.728772] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb6df28-c3a7-40be-bc81-e2384bb4dc1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.870718] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 973.107830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa6a9f24-cb2e-421d-bfab-f40fe3acad44 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.927s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.141409] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.202425] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833993, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.268581] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170b2374-c8ef-4d2d-98cf-403ceec953fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.276776] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9aab2bf-a55b-429a-aed0-f09b073817b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.308897] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Updating instance_info_cache with network_info: [{"id": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "address": "fa:16:3e:fc:a0:0a", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7a36d-f9", "ovs_interfaceid": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.310518] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ec7325-db09-4691-9416-837a576c78fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.318816] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93d01da-b4da-4ee1-aef9-7fa2f558bdcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.333493] env[68638]: DEBUG nova.compute.provider_tree [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.457232] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.457519] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.457726] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.457911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.458098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.460260] env[68638]: INFO nova.compute.manager [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Terminating instance [ 973.703424] env[68638]: DEBUG oslo_vmware.api [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2833993, 'name': PowerOnVM_Task, 'duration_secs': 0.516601} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.703704] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.703902] env[68638]: INFO nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Took 9.04 seconds to spawn the instance on the hypervisor. 
[ 973.704097] env[68638]: DEBUG nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.704871] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5900567-1457-4229-9464-b924bf4c28c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.814425] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.814931] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Instance network_info: |[{"id": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "address": "fa:16:3e:fc:a0:0a", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7a36d-f9", "ovs_interfaceid": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.815472] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:a0:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efc7a36d-f958-419e-b67e-5f3724ac0afc', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.827890] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Creating folder: Project (96009a6d8b4546db9775a0fda55674b9). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.828262] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d463dc4-0cc6-4793-9aae-c215a653d773 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.836963] env[68638]: DEBUG nova.scheduler.client.report [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.843217] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Created folder: Project (96009a6d8b4546db9775a0fda55674b9) in parent group-v569734. [ 973.843483] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Creating folder: Instances. Parent ref: group-v569964. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 973.844064] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5b858d5-f9b6-406c-a2dc-d666d1eaf31c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.854540] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Created folder: Instances in parent group-v569964. [ 973.854873] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 973.855843] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.856141] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4c8b356-9a6a-408a-b2f5-f79e85db8495 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.884914] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 973.886829] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.886829] env[68638]: value = "task-2833996" [ 973.886829] env[68638]: _type = "Task" [ 973.886829] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.895360] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833996, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.910257] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 973.910506] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 973.910663] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 973.910846] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 973.910994] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 973.911161] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 973.911369] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 
tempest-ListServersNegativeTestJSON-1942889640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 973.911527] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 973.911691] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 973.911852] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 973.912038] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 973.913152] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec008387-a41d-4453-944a-88dc9f2c094d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.920764] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8369bc-2c8f-40af-8175-dd2bd55558fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.964887] env[68638]: DEBUG nova.compute.manager [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.964887] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.965164] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f387e42-d988-4488-9f04-15da8ea1f788 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.972459] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.972771] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2b0d206-a675-4c52-8545-277f7a46902b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.979226] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 973.979226] env[68638]: value = "task-2833997" [ 973.979226] env[68638]: _type = "Task" [ 973.979226] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.988720] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.012524] env[68638]: DEBUG nova.compute.manager [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Received event network-changed-efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 974.012808] env[68638]: DEBUG nova.compute.manager [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Refreshing instance network info cache due to event network-changed-efc7a36d-f958-419e-b67e-5f3724ac0afc. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 974.013148] env[68638]: DEBUG oslo_concurrency.lockutils [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] Acquiring lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.013345] env[68638]: DEBUG oslo_concurrency.lockutils [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] Acquired lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.013564] env[68638]: DEBUG nova.network.neutron [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Refreshing network info cache for port efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.220802] env[68638]: INFO nova.compute.manager [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Took 36.25 seconds to build instance. [ 974.309079] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Successfully updated port: 3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 974.345502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.346130] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 974.348501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.713s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.351097] env[68638]: INFO nova.compute.claims [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.397564] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2833996, 'name': CreateVM_Task, 'duration_secs': 0.331242} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.397740] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.398463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.398632] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.398992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 974.399269] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3946466-faab-41c8-92cc-c9eaf0dd1329 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.403587] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 974.403587] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52923d7d-4019-c83c-9e66-c0c67ef15737" [ 974.403587] env[68638]: _type = "Task" [ 974.403587] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.411457] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52923d7d-4019-c83c-9e66-c0c67ef15737, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.489228] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833997, 'name': PowerOffVM_Task, 'duration_secs': 0.211055} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.489228] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.489447] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.489565] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ce9ea23-c74c-493d-801d-356f6d4acdff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.544584] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.544866] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.545090] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore1] 61b9bce5-6a3e-4149-a759-d08e2e2301ee {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.545346] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85a4b52a-7746-4b25-9f72-4fc8371e6bca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.552043] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 974.552043] env[68638]: value = "task-2833999" [ 974.552043] env[68638]: _type = "Task" [ 974.552043] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.559674] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.724027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-655ccd5b-b111-45b1-8798-a02b572688ba tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.762s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.759327] env[68638]: DEBUG nova.network.neutron [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Updated VIF entry in instance network info cache for port efc7a36d-f958-419e-b67e-5f3724ac0afc. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.759849] env[68638]: DEBUG nova.network.neutron [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Updating instance_info_cache with network_info: [{"id": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "address": "fa:16:3e:fc:a0:0a", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefc7a36d-f9", "ovs_interfaceid": "efc7a36d-f958-419e-b67e-5f3724ac0afc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.812488] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.812488] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.812949] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Building network info cache 
for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 974.854161] env[68638]: DEBUG nova.compute.utils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 974.858902] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 974.859125] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 974.896067] env[68638]: DEBUG nova.policy [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d18c1746cd542918c421149b896d288', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96009a6d8b4546db9775a0fda55674b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 974.916747] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52923d7d-4019-c83c-9e66-c0c67ef15737, 'name': SearchDatastore_Task, 'duration_secs': 0.009967} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.917138] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.917386] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.917624] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.917774] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.917957] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.918254] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a0e916e-9f8f-42c4-8049-3a8d328e305e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.928867] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.928867] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.929312] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01602ac2-9d25-4b18-9dfa-f628a1481df5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.935115] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 974.935115] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d7bde2-627e-4b76-dd39-c75af198aede" [ 974.935115] env[68638]: _type = "Task" [ 974.935115] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.943033] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d7bde2-627e-4b76-dd39-c75af198aede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.062570] env[68638]: DEBUG oslo_vmware.api [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2833999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131938} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.062886] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.063102] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.063282] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.063530] env[68638]: INFO nova.compute.manager [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Took 1.10 seconds to destroy the instance on the hypervisor. [ 975.063790] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.063986] env[68638]: DEBUG nova.compute.manager [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 975.064096] env[68638]: DEBUG nova.network.neutron [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 975.252212] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Successfully created port: 02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 975.262484] env[68638]: DEBUG oslo_concurrency.lockutils [req-1b8af9e2-3cf8-43a4-a79f-d43866f0bd3e req-3e5dc289-7051-4d22-892d-85f6fe10b338 service nova] Releasing lock "refresh_cache-ea8f58dc-1542-4723-bf86-369d4dff5f25" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.350386] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 975.359261] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 975.455597] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d7bde2-627e-4b76-dd39-c75af198aede, 'name': SearchDatastore_Task, 'duration_secs': 0.009085} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.459181] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdb4d334-7397-4dd6-8d25-817bb680e571 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.465473] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 975.465473] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521fb28b-7a5d-dff1-27a3-d040e40d155d" [ 975.465473] env[68638]: _type = "Task" [ 975.465473] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.471893] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521fb28b-7a5d-dff1-27a3-d040e40d155d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.526061] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Updating instance_info_cache with network_info: [{"id": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "address": "fa:16:3e:72:92:71", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3efe2356-fa", "ovs_interfaceid": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.737192] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db2c317-dff7-4fde-b510-450548cd5c8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.746609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93d50fe-1a32-4225-88b9-35b2ab3ac13a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.781828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d56ee7-1b8e-4519-9a1c-4fed5c621211 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.789891] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa356811-d97e-4fa2-840d-8931b82cb699 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.804393] env[68638]: DEBUG nova.compute.provider_tree [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.975047] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521fb28b-7a5d-dff1-27a3-d040e40d155d, 'name': SearchDatastore_Task, 'duration_secs': 0.032695} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.975756] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.975756] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ea8f58dc-1542-4723-bf86-369d4dff5f25/ea8f58dc-1542-4723-bf86-369d4dff5f25.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.976039] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ff31a70-8558-467d-af8c-d8060074b531 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.981677] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 975.981677] env[68638]: value = "task-2834000" [ 975.981677] env[68638]: _type = "Task" [ 975.981677] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.989188] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834000, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.028874] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.029244] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Instance network_info: |[{"id": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "address": "fa:16:3e:72:92:71", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3efe2356-fa", "ovs_interfaceid": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 976.029677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:92:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3efe2356-fa43-4dca-b3ba-854d3d82e4cf', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.037521] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.039519] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 976.041005] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Received event network-vif-plugged-3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 976.041215] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Acquiring lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.041419] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.041580] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.041742] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] No waiting events found dispatching network-vif-plugged-3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 976.041901] env[68638]: WARNING nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Received unexpected event network-vif-plugged-3efe2356-fa43-4dca-b3ba-854d3d82e4cf for instance with vm_state building and task_state spawning. [ 976.042078] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Received event network-changed-3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 976.042233] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Refreshing instance network info cache due to event network-changed-3efe2356-fa43-4dca-b3ba-854d3d82e4cf. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 976.042413] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Acquiring lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.042548] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Acquired lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.042698] env[68638]: DEBUG nova.network.neutron [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Refreshing network info cache for port 3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.043911] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-880fd8c6-8262-46e1-b197-6bb26a8270bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.059823] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.060050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.066874] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.066874] env[68638]: value = "task-2834001" [ 976.066874] env[68638]: _type = "Task" [ 976.066874] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.075015] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.189990] env[68638]: DEBUG nova.network.neutron [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.308546] env[68638]: DEBUG nova.scheduler.client.report [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.375524] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 976.416971] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 976.416971] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.417206] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 976.417449] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.417948] env[68638]: DEBUG 
nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 976.418161] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 976.418526] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 976.418844] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 976.419087] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 976.419702] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 976.419702] env[68638]: DEBUG nova.virt.hardware [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 976.421468] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da871b02-932c-4f71-92ea-f297d2a7d0b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.432237] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6c475b-4046-41c1-9e57-67d85e365b7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.493340] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834000, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.563126] env[68638]: DEBUG nova.compute.utils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 976.579183] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834001, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.693317] env[68638]: INFO nova.compute.manager [-] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Took 1.63 seconds to deallocate network for instance. [ 976.785782] env[68638]: DEBUG nova.network.neutron [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Updated VIF entry in instance network info cache for port 3efe2356-fa43-4dca-b3ba-854d3d82e4cf. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.786223] env[68638]: DEBUG nova.network.neutron [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Updating instance_info_cache with network_info: [{"id": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "address": "fa:16:3e:72:92:71", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3efe2356-fa", "ovs_interfaceid": "3efe2356-fa43-4dca-b3ba-854d3d82e4cf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.813984] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.814957] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 976.817499] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.704s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.817720] env[68638]: DEBUG nova.objects.instance [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lazy-loading 'resources' on Instance uuid 27ff37a6-de93-4a4b-904f-a91fdb8b0aff {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.992181] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652854} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.992496] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ea8f58dc-1542-4723-bf86-369d4dff5f25/ea8f58dc-1542-4723-bf86-369d4dff5f25.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 976.992739] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 976.993015] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06d4ff82-780d-4a3b-b817-bbbb90bd0d1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.999787] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 976.999787] env[68638]: value = "task-2834002" [ 976.999787] env[68638]: _type = "Task" [ 976.999787] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.007346] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834002, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.066348] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.071608] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Successfully updated port: 02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.077680] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834001, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.200421] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.288683] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] Releasing lock "refresh_cache-32d43fce-837d-41d9-be11-a0c3cdb1694b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.288961] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Received event network-vif-deleted-7200b9ae-be3f-4868-8707-b645c20bc18e {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 977.289164] env[68638]: INFO nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Neutron deleted interface 7200b9ae-be3f-4868-8707-b645c20bc18e; detaching it from the instance and deleting it from the info cache [ 977.289338] env[68638]: DEBUG nova.network.neutron [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.322418] env[68638]: DEBUG nova.compute.utils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 977.324546] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 977.324723] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 977.361079] env[68638]: DEBUG nova.policy [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 977.509852] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070125} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.510154] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.510974] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37f5c8b-ce78-4544-b965-f8cda433510a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.539968] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] ea8f58dc-1542-4723-bf86-369d4dff5f25/ea8f58dc-1542-4723-bf86-369d4dff5f25.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.542070] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ac21151-7ba0-4566-a423-51310f6d2410 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.561812] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 977.561812] env[68638]: value = "task-2834003" [ 977.561812] env[68638]: _type = "Task" [ 977.561812] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.576779] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834003, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.576779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.576779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.576779] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 977.582701] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834001, 'name': CreateVM_Task, 'duration_secs': 1.410428} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.582882] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.583711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.584631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.584631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 977.584631] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e2f57e1-e9f4-4187-b269-bdbb598a1de3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.590129] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 977.590129] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525a1c4d-63ee-eab6-a557-38a63b04c273" [ 977.590129] env[68638]: _type = "Task" [ 977.590129] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.602779] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525a1c4d-63ee-eab6-a557-38a63b04c273, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.701108] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81619d9-2e5c-4ca8-919f-ba91bcef7694 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.709165] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf36f72-28b2-44d2-b114-a7e62794b881 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.738258] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Successfully created port: 4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.740582] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbb78bf-8484-4d34-a36e-ea9bfe224689 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.748404] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c67e96b-a697-49d1-822a-0ceb0a59f4fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.762419] env[68638]: DEBUG nova.compute.provider_tree [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.791713] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c27afac2-7bba-441f-9b29-5d04cb1ee6b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.801185] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aac65b2-f60f-493c-b327-3744c1a94fe5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.832475] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 977.834920] env[68638]: DEBUG nova.compute.manager [req-5fd99b30-99fe-493f-b09c-f0f0b245aebe req-72e3b458-c877-45db-a3c5-64bcd913a049 service nova] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Detach interface failed, port_id=7200b9ae-be3f-4868-8707-b645c20bc18e, reason: Instance 61b9bce5-6a3e-4149-a759-d08e2e2301ee could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 978.068823] env[68638]: DEBUG nova.compute.manager [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Received event network-vif-plugged-02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 978.069078] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Acquiring lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.069270] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.069434] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.069601] env[68638]: DEBUG nova.compute.manager [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] No waiting events found dispatching network-vif-plugged-02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.069761] env[68638]: WARNING nova.compute.manager [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Received unexpected event network-vif-plugged-02b188c7-b3e5-45fe-b1c8-8af03b12180b for instance with vm_state building and task_state spawning. [ 978.069916] env[68638]: DEBUG nova.compute.manager [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Received event network-changed-02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 978.070104] env[68638]: DEBUG nova.compute.manager [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Refreshing instance network info cache due to event network-changed-02b188c7-b3e5-45fe-b1c8-8af03b12180b. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 978.070278] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Acquiring lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.076391] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.101678] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525a1c4d-63ee-eab6-a557-38a63b04c273, 'name': SearchDatastore_Task, 'duration_secs': 0.023469} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.101970] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.102260] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.102519] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.102701] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.102844] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.103120] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c666d52-f7fe-4821-9cce-4549cfc22bb2 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.117840] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.120874] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.121078] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.122115] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.122337] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.122547] env[68638]: INFO nova.compute.manager [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Attaching volume c34aafd8-4df4-43bc-830e-f0985bda456b to /dev/sdb [ 978.123928] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50acc118-969b-47da-a939-cb92e18a45ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.129585] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 978.129585] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5293fee8-ba1d-6783-8776-a31f1e1432d8" [ 978.129585] env[68638]: _type = "Task" [ 978.129585] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.141071] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5293fee8-ba1d-6783-8776-a31f1e1432d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.158554] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb01f5a4-df44-4794-a497-418f1ea9ca8c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.165218] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07f0484-b310-414b-bd12-e4bc2e5917cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.179514] env[68638]: DEBUG nova.virt.block_device [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Updating existing volume attachment record: 9f056651-2f3e-4c54-a8cd-f59b3971cdc1 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 978.253098] env[68638]: DEBUG nova.network.neutron [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Updating instance_info_cache with network_info: [{"id": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "address": "fa:16:3e:dc:10:23", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02b188c7-b3", "ovs_interfaceid": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.266684] env[68638]: DEBUG nova.scheduler.client.report [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.573079] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac 
tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834003, 'name': ReconfigVM_Task, 'duration_secs': 0.800394} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.573382] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Reconfigured VM instance instance-00000053 to attach disk [datastore2] ea8f58dc-1542-4723-bf86-369d4dff5f25/ea8f58dc-1542-4723-bf86-369d4dff5f25.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.574049] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0720e15-53f2-43b6-a2da-b10b391d024d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.580223] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 978.580223] env[68638]: value = "task-2834007" [ 978.580223] env[68638]: _type = "Task" [ 978.580223] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.588368] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834007, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.641794] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5293fee8-ba1d-6783-8776-a31f1e1432d8, 'name': SearchDatastore_Task, 'duration_secs': 0.031928} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.642596] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-405d6cb0-f193-46ad-922c-67fad2a5c26a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.648295] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 978.648295] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527d5ba0-e39c-29db-b8bc-18971c58ef6d" [ 978.648295] env[68638]: _type = "Task" [ 978.648295] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.656733] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d5ba0-e39c-29db-b8bc-18971c58ef6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.756565] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.756923] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Instance network_info: |[{"id": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "address": "fa:16:3e:dc:10:23", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02b188c7-b3", "ovs_interfaceid": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 978.757254] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Acquired lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.757430] env[68638]: DEBUG nova.network.neutron [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Refreshing network info cache for port 02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.758690] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:10:23', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02b188c7-b3e5-45fe-b1c8-8af03b12180b', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 978.766276] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 978.767409] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 978.767667] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c27c3cdb-3037-4d0b-b3ee-eee6db73a89d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.783800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.786077] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.299s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.786307] env[68638]: DEBUG nova.objects.instance [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lazy-loading 'resources' on Instance uuid 1bc685aa-4e88-402f-b581-d179706b12a5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.794364] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 978.794364] env[68638]: value = "task-2834008" [ 978.794364] env[68638]: _type = "Task" [ 978.794364] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.806027] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834008, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.808986] env[68638]: INFO nova.scheduler.client.report [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Deleted allocations for instance 27ff37a6-de93-4a4b-904f-a91fdb8b0aff [ 978.842614] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 978.872619] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 978.873948] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 978.873948] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 978.873948] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 978.873948] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 978.873948] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 978.874233] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 978.874233] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 978.874313] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 978.874482] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 978.874718] env[68638]: DEBUG nova.virt.hardware [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 978.876040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b2b601-354b-470f-9481-3790a0eb83dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.884626] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf5061a-2cc9-4253-a825-28c40ee63321 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.089844] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834007, 'name': Rename_Task, 'duration_secs': 0.133248} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.090153] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.090409] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96a23832-2e44-447b-82ff-35e5486c6a63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.096024] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.096024] env[68638]: value = "task-2834009" [ 979.096024] env[68638]: _type = "Task" [ 979.096024] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.108741] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834009, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.160867] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d5ba0-e39c-29db-b8bc-18971c58ef6d, 'name': SearchDatastore_Task, 'duration_secs': 0.008665} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.161343] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.161661] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 32d43fce-837d-41d9-be11-a0c3cdb1694b/32d43fce-837d-41d9-be11-a0c3cdb1694b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 979.161991] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-994d85a6-751d-430d-a50c-3438460a46c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.168467] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.168467] env[68638]: value = "task-2834010" [ 979.168467] env[68638]: _type = "Task" [ 979.168467] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.177121] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834010, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.225388] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Successfully updated port: 4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.305991] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834008, 'name': CreateVM_Task, 'duration_secs': 0.328673} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.306192] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.306848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.307015] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.307373] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 979.307628] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fd7770a-6351-453e-849d-738fe3bc2136 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.312269] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.312269] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526a641e-9514-ba5e-46b3-48c81d413599" [ 979.312269] env[68638]: _type = "Task" [ 979.312269] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.321027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1fc98d19-f3c7-4893-94c9-ab62e12ab83e tempest-ServersWithSpecificFlavorTestJSON-1487475275 tempest-ServersWithSpecificFlavorTestJSON-1487475275-project-member] Lock "27ff37a6-de93-4a4b-904f-a91fdb8b0aff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.967s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.327868] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526a641e-9514-ba5e-46b3-48c81d413599, 'name': SearchDatastore_Task, 'duration_secs': 0.009575} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.330512] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.330758] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.330982] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.331143] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.331324] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.332119] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a662788-4313-44b6-a633-8947262ae561 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.344191] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.344384] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.345558] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ee817a-c514-4601-a85c-832714f98b37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.351540] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.351540] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52493b27-ca17-2f79-8730-17f2d9c4a3ed" [ 979.351540] env[68638]: _type = "Task" [ 979.351540] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.365389] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52493b27-ca17-2f79-8730-17f2d9c4a3ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011134} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.366246] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06bc62f7-88bc-4113-88ca-7cdf733dae7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.374143] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.374143] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d2d876-b7bf-67e1-f257-da76baa32a8c" [ 979.374143] env[68638]: _type = "Task" [ 979.374143] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.383259] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d2d876-b7bf-67e1-f257-da76baa32a8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.524438] env[68638]: DEBUG nova.network.neutron [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Updated VIF entry in instance network info cache for port 02b188c7-b3e5-45fe-b1c8-8af03b12180b. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.524887] env[68638]: DEBUG nova.network.neutron [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Updating instance_info_cache with network_info: [{"id": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "address": "fa:16:3e:dc:10:23", "network": {"id": "d443579e-c64a-4064-91e4-1cbdd2aed377", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1714690453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96009a6d8b4546db9775a0fda55674b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02b188c7-b3", "ovs_interfaceid": "02b188c7-b3e5-45fe-b1c8-8af03b12180b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.610825] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834009, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.675077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5ef486-04fb-4918-b2d5-065b56d7b7e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.685042] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834010, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.686184] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5754d348-8e51-4031-a353-1877309cc253 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.721259] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4f9fd7-c274-439c-a6c5-927ae48605a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.728795] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.728795] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.728795] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 979.730597] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f32c306-339a-499e-9f22-f95e0be0a0f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.744744] env[68638]: DEBUG nova.compute.provider_tree [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.884467] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d2d876-b7bf-67e1-f257-da76baa32a8c, 'name': SearchDatastore_Task, 'duration_secs': 0.010332} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.885938] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.885938] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e9b8e5ad-4d47-48ad-995f-b28d0230df0f/e9b8e5ad-4d47-48ad-995f-b28d0230df0f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 979.885938] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-309604b0-9a8c-4baf-96c5-93c8268734ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.892990] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 979.892990] env[68638]: value = "task-2834011" [ 979.892990] env[68638]: _type = "Task" [ 979.892990] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.902844] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834011, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.028078] env[68638]: DEBUG oslo_concurrency.lockutils [req-91c68b48-1777-4c9d-927c-9159e6b52452 req-32f6af8f-de27-4f79-992c-fa37546c8fce service nova] Releasing lock "refresh_cache-e9b8e5ad-4d47-48ad-995f-b28d0230df0f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.094908] env[68638]: DEBUG nova.compute.manager [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Received event network-vif-plugged-4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 980.095220] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Acquiring lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.095445] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.095613] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.095822] env[68638]: DEBUG nova.compute.manager [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] No waiting events found dispatching network-vif-plugged-4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.096070] env[68638]: WARNING nova.compute.manager [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Received unexpected event network-vif-plugged-4b1efa68-0e83-4c79-94dc-33b0388cdbc1 for instance with vm_state building and task_state spawning. [ 980.096312] env[68638]: DEBUG nova.compute.manager [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Received event network-changed-4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 980.096490] env[68638]: DEBUG nova.compute.manager [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Refreshing instance network info cache due to event network-changed-4b1efa68-0e83-4c79-94dc-33b0388cdbc1. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 980.096660] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Acquiring lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.108106] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834009, 'name': PowerOnVM_Task, 'duration_secs': 0.82837} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.108363] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.108556] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Took 8.72 seconds to spawn the instance on the hypervisor. [ 980.108758] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 980.109546] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060378b0-dd88-4110-95ff-2d4da3ada848 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.184974] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834010, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564181} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.184974] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 32d43fce-837d-41d9-be11-a0c3cdb1694b/32d43fce-837d-41d9-be11-a0c3cdb1694b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 980.184974] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 980.185301] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ade84872-afad-4789-b8dc-26543d0c7317 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.194043] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 980.194043] env[68638]: value = "task-2834012" [ 980.194043] env[68638]: _type = "Task" [ 980.194043] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.207377] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.250634] env[68638]: DEBUG nova.scheduler.client.report [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 980.267314] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 980.408431] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834011, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.426198] env[68638]: DEBUG nova.network.neutron [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Updating instance_info_cache with network_info: [{"id": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "address": "fa:16:3e:d7:4d:30", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b1efa68-0e", "ovs_interfaceid": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.629871] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Took 33.27 seconds to build instance. [ 980.703824] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092852} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.704129] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 980.704954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1af0ac-1dfa-405d-80d2-dc86f78d23a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.728761] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 32d43fce-837d-41d9-be11-a0c3cdb1694b/32d43fce-837d-41d9-be11-a0c3cdb1694b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 980.729089] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f5feff8-8e8d-4850-9380-965444f5dd35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.749479] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 980.749479] env[68638]: value = "task-2834014" [ 980.749479] env[68638]: _type = "Task" [ 980.749479] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.759273] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.761318] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834014, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.762524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.801s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.764730] env[68638]: INFO nova.compute.claims [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.790020] env[68638]: INFO nova.scheduler.client.report [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Deleted allocations for instance 1bc685aa-4e88-402f-b581-d179706b12a5 [ 980.903840] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602125} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.904272] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e9b8e5ad-4d47-48ad-995f-b28d0230df0f/e9b8e5ad-4d47-48ad-995f-b28d0230df0f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 980.904567] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 980.904909] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db0cdbe3-245e-4922-9111-9760085d12b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.911544] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 980.911544] env[68638]: value = "task-2834015" [ 980.911544] env[68638]: _type = "Task" [ 980.911544] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.923819] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834015, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.929651] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.930452] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Instance network_info: |[{"id": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "address": "fa:16:3e:d7:4d:30", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b1efa68-0e", "ovs_interfaceid": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 980.931179] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Acquired lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.931506] env[68638]: DEBUG nova.network.neutron [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Refreshing network info cache for port 4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.934300] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:4d:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b1efa68-0e83-4c79-94dc-33b0388cdbc1', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 980.952760] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.954355] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 980.954683] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f58823df-6550-4f87-8088-f899b5e6c2ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.979571] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 980.979571] env[68638]: value = "task-2834016" [ 980.979571] env[68638]: _type = "Task" [ 980.979571] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.988730] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834016, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.133098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.791s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.259827] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834014, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.298563] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e00ac22-476c-487a-9e58-2eaa34b30735 tempest-ServersTestManualDisk-758396376 tempest-ServersTestManualDisk-758396376-project-member] Lock "1bc685aa-4e88-402f-b581-d179706b12a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.791s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.427090] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834015, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149736} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.427503] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.429273] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e222bd-0af6-4921-ad90-90e56f333c37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.460393] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] e9b8e5ad-4d47-48ad-995f-b28d0230df0f/e9b8e5ad-4d47-48ad-995f-b28d0230df0f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.461057] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14629c25-242d-4d3a-85be-c78aff598c82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.481634] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 981.481634] env[68638]: value = "task-2834017" [ 981.481634] env[68638]: _type = "Task" [ 981.481634] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.492562] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834016, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.495832] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.761179] env[68638]: DEBUG nova.network.neutron [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Updated VIF entry in instance network info cache for port 4b1efa68-0e83-4c79-94dc-33b0388cdbc1. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.761179] env[68638]: DEBUG nova.network.neutron [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Updating instance_info_cache with network_info: [{"id": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "address": "fa:16:3e:d7:4d:30", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b1efa68-0e", "ovs_interfaceid": "4b1efa68-0e83-4c79-94dc-33b0388cdbc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.764046] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834014, 'name': ReconfigVM_Task, 'duration_secs': 0.545354} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.764704] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 32d43fce-837d-41d9-be11-a0c3cdb1694b/32d43fce-837d-41d9-be11-a0c3cdb1694b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 981.765562] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee1f3ff5-7725-4321-b674-53dd2eefa577 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.775021] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 981.775021] env[68638]: value = "task-2834018" [ 981.775021] env[68638]: _type = "Task" [ 981.775021] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.786867] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834018, 'name': Rename_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.995295] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834017, 'name': ReconfigVM_Task, 'duration_secs': 0.280524} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.001423] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Reconfigured VM instance instance-00000055 to attach disk [datastore2] e9b8e5ad-4d47-48ad-995f-b28d0230df0f/e9b8e5ad-4d47-48ad-995f-b28d0230df0f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.002224] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834016, 'name': CreateVM_Task, 'duration_secs': 0.623386} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.004081] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06a49571-3594-4ca5-a240-03d1a236d372 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.004173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 982.004721] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.004910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.005240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 982.006657] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1757ac5b-e879-4818-bcd1-5db4c6ede11a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.011315] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 982.011315] env[68638]: value = "task-2834019" [ 982.011315] env[68638]: _type = 
"Task" [ 982.011315] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.016244] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 982.016244] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526b86c4-9d23-3b74-c039-922f4114bfe2" [ 982.016244] env[68638]: _type = "Task" [ 982.016244] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.022934] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834019, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.028283] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526b86c4-9d23-3b74-c039-922f4114bfe2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.152517] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da700ce8-fd40-47d0-9b20-38a29c48b89e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.162373] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e150b9-a279-48e7-8b78-77324b69ac88 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.198261] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d2f931-b947-489d-bc46-2852ccd8d343 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.206599] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7994eedd-c051-406c-8930-510193e07e37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.951401] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 982.951789] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569969', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'name': 'volume-c34aafd8-4df4-43bc-830e-f0985bda456b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94af9123-435f-4ae4-8b6d-82838df61d4e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'serial': 'c34aafd8-4df4-43bc-830e-f0985bda456b'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 982.952262] env[68638]: DEBUG oslo_concurrency.lockutils [req-28e744a8-04e4-4388-b82f-c59e754e9dc8 req-39553de7-e5c6-4676-94c6-275303a88be0 service nova] Releasing lock "refresh_cache-43e0eed3-bc25-476d-a9ef-6b132514cf90" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.953815] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9d4aeb-f36f-4eb4-8edf-d1c8eefa772e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.977087] env[68638]: DEBUG nova.compute.provider_tree [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.997527] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834018, 'name': Rename_Task, 'duration_secs': 0.13666} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.997838] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834019, 'name': Rename_Task, 'duration_secs': 0.135345} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.999799] env[68638]: DEBUG nova.scheduler.client.report [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.003251] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdeede1-5450-46df-b2c6-39928d355c57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.005856] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.006173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.010557] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14d5aef9-108a-4f12-bc74-23b4838019c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.012230] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bb77713-6175-4f06-961d-6a374bb7f826 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.014142] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526b86c4-9d23-3b74-c039-922f4114bfe2, 'name': SearchDatastore_Task, 'duration_secs': 0.038526} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.014809] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.015530] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.015530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.015530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.015718] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.016811] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f6b2c57-7654-46ee-8014-de4d713cb4d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.038566] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] volume-c34aafd8-4df4-43bc-830e-f0985bda456b/volume-c34aafd8-4df4-43bc-830e-f0985bda456b.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.041827] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0440239-5823-4cbe-85f0-e92908e7f752 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.055109] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 983.055109] env[68638]: value = "task-2834021" [ 983.055109] env[68638]: _type = "Task" [ 983.055109] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.055437] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 983.055437] env[68638]: value = "task-2834020" [ 983.055437] env[68638]: _type = "Task" [ 983.055437] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.057210] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.057548] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.064662] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edf6d8dc-b408-4226-ac94-e1cdda015e17 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.067992] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 983.067992] env[68638]: value = "task-2834022" [ 983.067992] env[68638]: _type = "Task" [ 983.067992] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.079758] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834021, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.080040] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 983.080040] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d1711c-0dfd-1614-8216-aafc2601ad96" [ 983.080040] env[68638]: _type = "Task" [ 983.080040] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.080040] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834020, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.087061] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834022, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.092473] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d1711c-0dfd-1614-8216-aafc2601ad96, 'name': SearchDatastore_Task, 'duration_secs': 0.018496} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.093428] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9550ea72-d039-47db-92d3-d7c5ee44790f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.098432] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 983.098432] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528ee70f-65da-d756-8431-59a787b7acc6" [ 983.098432] env[68638]: _type = "Task" [ 983.098432] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.107778] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528ee70f-65da-d756-8431-59a787b7acc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.508841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.509403] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 983.512120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.915s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.513558] env[68638]: INFO nova.compute.claims [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.577061] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834020, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.577061] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834021, 'name': PowerOnVM_Task, 'duration_secs': 0.494766} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.581422] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 983.581647] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Took 7.21 seconds to spawn the instance on the hypervisor. [ 983.581827] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 983.582889] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9e442c-2c2b-49e9-bd40-e95327d69418 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.596127] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834022, 'name': ReconfigVM_Task, 'duration_secs': 0.445225} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.596433] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfigured VM instance instance-00000052 to attach disk [datastore2] volume-c34aafd8-4df4-43bc-830e-f0985bda456b/volume-c34aafd8-4df4-43bc-830e-f0985bda456b.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.601421] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-190caec0-0c5d-45cb-8f06-686611cf3bf5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.621864] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528ee70f-65da-d756-8431-59a787b7acc6, 'name': SearchDatastore_Task, 'duration_secs': 0.011963} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.623196] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.623480] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 43e0eed3-bc25-476d-a9ef-6b132514cf90/43e0eed3-bc25-476d-a9ef-6b132514cf90.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.623795] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 983.623795] env[68638]: value = "task-2834023" [ 983.623795] env[68638]: _type = "Task" [ 983.623795] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.623989] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd4debad-a340-4333-9b10-75595009dad1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.637659] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834023, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.638808] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 983.638808] env[68638]: value = "task-2834024" [ 983.638808] env[68638]: _type = "Task" [ 983.638808] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.646928] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.023682] env[68638]: DEBUG nova.compute.utils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 984.027947] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 984.027947] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.069153] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834020, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.094731] env[68638]: DEBUG nova.policy [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2342b684bc504028a54c449fae10cc76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87e8ae27bc22417a82dce8d4fe1ca8e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 984.103136] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Took 36.68 seconds to build instance. 
[ 984.139590] env[68638]: DEBUG oslo_vmware.api [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834023, 'name': ReconfigVM_Task, 'duration_secs': 0.215442} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.140363] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569969', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'name': 'volume-c34aafd8-4df4-43bc-830e-f0985bda456b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94af9123-435f-4ae4-8b6d-82838df61d4e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'serial': 'c34aafd8-4df4-43bc-830e-f0985bda456b'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 984.154051] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834024, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.460525] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Successfully created port: 4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.537218] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 984.570654] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834020, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.605234] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.191s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.661436] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604622} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.661436] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 43e0eed3-bc25-476d-a9ef-6b132514cf90/43e0eed3-bc25-476d-a9ef-6b132514cf90.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.662044] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.662372] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc90e417-2802-48b2-848e-82478ed12206 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.672749] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 984.672749] env[68638]: value = "task-2834025" [ 984.672749] env[68638]: _type = "Task" [ 984.672749] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.682286] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834025, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.885494] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0172200f-3317-47df-87ae-7ac814534c6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.893094] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0df65c-b1dd-417a-9c1f-02d6911eaa61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.925747] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fec38a3-8471-4df4-828e-657377c213ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.934690] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01fc2ce-caff-4da3-93da-0b8e6d4a0d93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.951856] env[68638]: DEBUG nova.compute.provider_tree [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.072994] env[68638]: DEBUG oslo_vmware.api [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834020, 'name': PowerOnVM_Task, 'duration_secs': 1.56703} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.072994] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.072994] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Took 11.19 seconds to spawn the instance on the hypervisor. 
[ 985.072994] env[68638]: DEBUG nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.072994] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ac9b53-3b96-4d68-a051-effc8bb3d7d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.184359] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074517} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.185203] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.188075] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d4e57d-940b-43e1-a053-91a07755a37a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.210454] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 43e0eed3-bc25-476d-a9ef-6b132514cf90/43e0eed3-bc25-476d-a9ef-6b132514cf90.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.211496] env[68638]: DEBUG nova.objects.instance [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'flavor' on Instance uuid 94af9123-435f-4ae4-8b6d-82838df61d4e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.214932] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0628983e-c290-4129-ac97-44f4197ec11a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.239887] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 985.239887] env[68638]: value = "task-2834026" [ 985.239887] env[68638]: _type = "Task" [ 985.239887] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.251745] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834026, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.483016] env[68638]: ERROR nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [req-50131bfb-8c4f-43c8-9c12-fb0e2d514716] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-50131bfb-8c4f-43c8-9c12-fb0e2d514716"}]} [ 985.498507] env[68638]: DEBUG nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 985.512553] env[68638]: DEBUG nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 985.513029] env[68638]: DEBUG nova.compute.provider_tree [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.529918] env[68638]: DEBUG nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 985.546251] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 
tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 985.550052] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.551481] env[68638]: DEBUG nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 985.584564] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 985.584926] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.585552] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 985.585856] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.586107] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 985.586331] env[68638]: DEBUG 
nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 985.586667] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 985.586897] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 985.587150] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 985.587523] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 985.587725] env[68638]: DEBUG nova.virt.hardware [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 985.596697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca3a72f-8971-4798-a16a-de22b3b4f9f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.602341] env[68638]: INFO nova.compute.manager [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Took 38.21 seconds to build instance. 
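Editor's note: the sequence starting at 985.483 is Placement's optimistic-concurrency protocol in action. The PUT of the inventory fails with 409 "placement.concurrent_update" because another thread bumped the resource provider generation, so the report client refreshes inventories, aggregates and traits and retries with the fresh generation (115 -> 116 later in this log). A minimal sketch of that retry loop follows, assuming direct access to the Placement REST API via requests; the endpoint URL, token and retry count are illustrative placeholders, not values from this deployment.

```python
# Hedged sketch: optimistic-concurrency update of a resource provider's
# inventory against the Placement API, mirroring the 409/refresh/retry
# sequence above. Auth, microversion negotiation and service discovery
# are omitted; PLACEMENT_URL and the token are placeholders (assumptions).
import requests

PLACEMENT_URL = "http://placement.example/placement"          # assumption
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",                      # assumption
           "Accept": "application/json"}

def set_inventory(rp_uuid, inventories, max_retries=4):
    for _ in range(max_retries):
        # Read the provider's current generation (Placement's optimistic lock).
        rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()          # success: the new generation comes back
        errors = resp.json().get("errors", [])
        if not any(e.get("code") == "placement.concurrent_update"
                   for e in errors):
            resp.raise_for_status()     # some other conflict: give up
        # Someone else bumped the generation (as at 985.483 above): loop,
        # re-read the provider, and retry with the refreshed generation.
    raise RuntimeError("inventory update kept conflicting; giving up")
```

Nova's report client does the equivalent internally through _refresh_and_get_inventory and set_inventory_for_provider, which is exactly what the DEBUG lines above trace.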
[ 985.606753] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051b51ef-109f-43d0-90cc-60f7bcd6b70b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.718025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8540ac2e-f004-43ab-a2de-b1345b55d216 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.595s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.719741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.169s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.719741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.722599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.722599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.727698] env[68638]: INFO nova.compute.manager [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Terminating instance [ 985.756762] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834026, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.920433] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b14202e-84ab-4b69-8ad6-a7aaf85825d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.929403] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afe5df1-a985-4642-9b9e-b65c19164fb1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.960867] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b4f9fd0-2047-4d4f-ac91-148ab5380e1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.969058] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8a0058-68a1-410d-a325-224d22032be0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.983290] env[68638]: DEBUG nova.compute.provider_tree [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.106776] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0b86e9b3-bc0b-4f11-86c6-52c89361e5ac tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.733s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.127445] env[68638]: DEBUG nova.compute.manager [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Received event network-vif-plugged-4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 986.127445] env[68638]: DEBUG oslo_concurrency.lockutils [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] Acquiring lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.127445] env[68638]: DEBUG oslo_concurrency.lockutils [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.127445] env[68638]: DEBUG oslo_concurrency.lockutils [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.127445] env[68638]: DEBUG nova.compute.manager [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] No waiting events found dispatching network-vif-plugged-4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 986.127445] env[68638]: WARNING nova.compute.manager [req-92d1ea31-4953-459f-8f01-014d4c0ff0fc req-cb8f7c19-a112-4245-9c56-6103c7d203c3 service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Received unexpected event network-vif-plugged-4d549532-6530-40bb-95cf-fba098d9c5d8 for instance with vm_state building and task_state spawning. [ 986.231784] env[68638]: DEBUG nova.compute.manager [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 986.231784] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.231784] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e42eb66-05db-4c8b-85f4-ee7c320be71b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.240329] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 986.240329] env[68638]: value = "task-2834027" [ 986.240329] env[68638]: _type = "Task" [ 986.240329] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.251986] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834027, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.255308] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834026, 'name': ReconfigVM_Task, 'duration_secs': 0.615025} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.255639] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 43e0eed3-bc25-476d-a9ef-6b132514cf90/43e0eed3-bc25-476d-a9ef-6b132514cf90.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 986.256890] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f1b14f6-84ed-481e-ad61-f50364520cc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.262710] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 986.262710] env[68638]: value = "task-2834028" [ 986.262710] env[68638]: _type = "Task" [ 986.262710] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.272401] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834028, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.290022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.290287] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.290492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.290666] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
986.290830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.292980] env[68638]: INFO nova.compute.manager [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Terminating instance [ 986.525908] env[68638]: DEBUG nova.scheduler.client.report [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 115 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 986.526222] env[68638]: DEBUG nova.compute.provider_tree [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 115 to 116 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 986.526415] env[68638]: DEBUG nova.compute.provider_tree [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.750878] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834027, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.768932] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Successfully updated port: 4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.779240] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834028, 'name': Rename_Task, 'duration_secs': 0.174113} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.779565] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 986.779815] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2567299a-091c-4baa-8eb5-902cd0460838 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.788079] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 986.788079] env[68638]: value = "task-2834029" [ 986.788079] env[68638]: _type = "Task" [ 986.788079] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.797361] env[68638]: DEBUG nova.compute.manager [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 986.797595] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 986.797955] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834029, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.798805] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae7cdf6-a37a-489a-a86e-33d23d2c3734 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.805820] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.806105] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-074e8ff9-6591-437f-9f17-1a5cf29e0fd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.814856] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 986.814856] env[68638]: value = "task-2834030" [ 986.814856] env[68638]: _type = "Task" [ 986.814856] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.824803] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.853756] env[68638]: DEBUG nova.compute.manager [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Received event network-changed-4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 986.854038] env[68638]: DEBUG nova.compute.manager [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Refreshing instance network info cache due to event network-changed-4d549532-6530-40bb-95cf-fba098d9c5d8. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 986.854279] env[68638]: DEBUG oslo_concurrency.lockutils [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] Acquiring lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.854417] env[68638]: DEBUG oslo_concurrency.lockutils [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] Acquired lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.854542] env[68638]: DEBUG nova.network.neutron [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Refreshing network info cache for port 4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 987.031383] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.519s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.032084] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.037829] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.872s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.039578] env[68638]: INFO nova.compute.claims [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.252269] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834027, 'name': PowerOffVM_Task, 'duration_secs': 0.761028} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.252824] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.253972] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 987.253972] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569969', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'name': 'volume-c34aafd8-4df4-43bc-830e-f0985bda456b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94af9123-435f-4ae4-8b6d-82838df61d4e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'serial': 'c34aafd8-4df4-43bc-830e-f0985bda456b'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 987.254305] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2803522e-eebc-4990-ba80-0cc754373c79 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.280637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.281719] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93be4ff2-1b84-45d0-a9f8-513c99ba47fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.295259] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ba0ad7-0ed9-4fd3-978a-7d832405bf20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.304333] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834029, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.331534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eafe2ed-e890-42de-a333-c3c9e4c91825 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.339625] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834030, 'name': PowerOffVM_Task, 'duration_secs': 0.205533} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.355584] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.355846] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 987.356724] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] The volume has not been displaced from its original location: [datastore2] volume-c34aafd8-4df4-43bc-830e-f0985bda456b/volume-c34aafd8-4df4-43bc-830e-f0985bda456b.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 987.362910] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 987.363578] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cca68b1d-5fe7-49d5-b47c-cb7a5bf540b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.367411] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc756189-c4c3-4a81-ba73-531f15c7dde8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.390050] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 987.390050] env[68638]: value = "task-2834032" [ 987.390050] env[68638]: _type = "Task" [ 987.390050] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.398992] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.441417] env[68638]: DEBUG nova.network.neutron [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.460548] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 987.460793] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 987.461010] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleting the datastore file [datastore2] ea8f58dc-1542-4723-bf86-369d4dff5f25 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 987.461340] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98da2101-e2ca-4f02-8e40-c4c9171f3345 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.468870] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 987.468870] env[68638]: value = "task-2834033" [ 987.468870] env[68638]: _type = "Task" [ 987.468870] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.480163] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834033, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.546694] env[68638]: DEBUG nova.network.neutron [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.553421] env[68638]: DEBUG nova.compute.utils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 987.554585] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 987.755963] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.756394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.803352] env[68638]: DEBUG oslo_vmware.api [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834029, 'name': PowerOnVM_Task, 'duration_secs': 0.55473} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.803618] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 987.804491] env[68638]: INFO nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Took 8.96 seconds to spawn the instance on the hypervisor. 
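Editor's note: task-2834028 (Rename_Task) and task-2834029 (PowerOnVM_Task) above follow the usual oslo.vmware task lifecycle: invoke the *_Task method on vCenter, then poll TaskInfo until it reports success ("completed successfully", duration_secs 0.55473) while _poll_task logs the intermediate progress (0% -> 89% -> 100%). A rough sketch of that pattern, assuming the oslo.vmware session API referenced in the log's tracebacks (VMwareAPISession, invoke_api, wait_for_task); the connection parameters below are placeholders, not this deployment's vCenter.

```python
# Hedged sketch of the power-on + poll pattern seen around task-2834029.
# Host and credentials are assumptions; a real session needs a reachable vCenter.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    host="vcenter.example",           # assumption
    server_username="user",           # assumption
    server_password="secret",         # assumption
    api_retry_count=3,
    task_poll_interval=0.5)

def power_on(vm_ref):
    # Start the asynchronous vSphere task, then let the session poll
    # TaskInfo (progress 0% .. 100%) until it succeeds or raises --
    # the same loop behind the "_poll_task" lines in this log.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task)   # returns the TaskInfo on success
```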
[ 987.804491] env[68638]: DEBUG nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.805504] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3060ae3d-8570-4d0b-9c08-c5ae1fedb24f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.899401] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834032, 'name': ReconfigVM_Task, 'duration_secs': 0.25576} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.899708] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 987.905742] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fd7eeb5-4d02-4754-afec-e5e3d9119d78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.921935] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 987.921935] env[68638]: value = "task-2834034" [ 987.921935] env[68638]: _type = "Task" [ 987.921935] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.930086] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834034, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.976999] env[68638]: DEBUG oslo_vmware.api [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246691} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.977341] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 987.977529] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 987.977705] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 987.977876] env[68638]: INFO nova.compute.manager [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Took 1.18 seconds to destroy the instance on the hypervisor. [ 987.978136] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 987.979387] env[68638]: DEBUG nova.compute.manager [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 987.979387] env[68638]: DEBUG nova.network.neutron [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 988.052821] env[68638]: DEBUG oslo_concurrency.lockutils [req-d98af502-3c5e-4a0a-bc22-4d28c467ded7 req-34482c52-1d2e-45f3-a761-161c2414deec service nova] Releasing lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.053717] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquired lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.053717] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.058614] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 988.259252] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.295683] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "7d99d946-f2df-4d31-911f-ac479849b901" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.296129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.325254] env[68638]: INFO nova.compute.manager [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Took 40.71 seconds to build instance. [ 988.436508] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834034, 'name': ReconfigVM_Task, 'duration_secs': 0.318422} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.436850] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569969', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'name': 'volume-c34aafd8-4df4-43bc-830e-f0985bda456b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94af9123-435f-4ae4-8b6d-82838df61d4e', 'attached_at': '', 'detached_at': '', 'volume_id': 'c34aafd8-4df4-43bc-830e-f0985bda456b', 'serial': 'c34aafd8-4df4-43bc-830e-f0985bda456b'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 988.437214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 988.438115] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4242c0b1-136b-49b2-9fba-f28ff14b973a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.446259] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Unregistering the VM {{(pid=68638) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.449297] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bc37406-09ea-402d-b97f-ada7503d1470 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.505081] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9494046d-a2e8-4dd7-8622-3f9eedb544c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.513610] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c471effb-84fe-48fa-847f-521d7d416b7f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.518491] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.518756] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.518943] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore2] 94af9123-435f-4ae4-8b6d-82838df61d4e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.519199] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4200e4b7-6364-44d5-80b2-114ad005bf4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.549618] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59659cc6-b6b3-468d-8d09-18c55f81dcd6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.552313] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 988.552313] env[68638]: value = "task-2834036" [ 988.552313] env[68638]: _type = "Task" [ 988.552313] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.561324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7253f9ec-ef0d-4cb0-b256-a2606375284d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.571859] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834036, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.584280] env[68638]: DEBUG nova.compute.provider_tree [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.606108] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 988.740022] env[68638]: DEBUG nova.network.neutron [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Updating instance_info_cache with network_info: [{"id": "4d549532-6530-40bb-95cf-fba098d9c5d8", "address": "fa:16:3e:23:b1:b3", "network": {"id": "e967a4ea-9ab0-4ed5-b14b-813f7a59308f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1273332228-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87e8ae27bc22417a82dce8d4fe1ca8e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d549532-65", "ovs_interfaceid": "4d549532-6530-40bb-95cf-fba098d9c5d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.780596] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.799932] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.817193] env[68638]: DEBUG nova.network.neutron [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.830931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-63aeed7c-8785-4e31-8b3b-2ac89ef5a2a4 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.225s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.884891] env[68638]: DEBUG nova.compute.manager [req-1a63ae9d-8d99-4f79-b8b4-2496ac593663 req-8e83f1e0-96ba-424e-8bbe-8ae0e5d118b6 service nova] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Received event network-vif-deleted-efc7a36d-f958-419e-b67e-5f3724ac0afc {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 989.067575] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.077673] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 989.088068] env[68638]: DEBUG nova.scheduler.client.report [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.108145] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.108432] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.108589] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.108766] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.108907] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.109071] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.109409] env[68638]: DEBUG nova.virt.hardware 
[None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.109463] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.109614] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.109831] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.110056] env[68638]: DEBUG nova.virt.hardware [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.112543] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21da58bd-8985-4758-9a49-d34095d23243 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.122316] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87969336-b34a-49c4-a642-84ded6ab1625 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.138730] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.144586] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Creating folder: Project (28da1c9156f941f1a6c35b4dcaed3b71). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.144927] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51657241-b118-4766-a642-d1eac2f558a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.156442] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Created folder: Project (28da1c9156f941f1a6c35b4dcaed3b71) in parent group-v569734. 
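The nova.virt.hardware entries above show how the driver settles on a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image constraints the sockets/cores/threads limits default to 65536, only one layout (1 socket x 1 core x 1 thread) can account for a single vCPU, and that is the topology sorted to the front. The sketch below is a simplified, hypothetical re-enumeration of that search, not Nova's actual nova.virt.hardware code, which applies additional preference and NUMA rules.

    # Hypothetical sketch of the topology enumeration traced above; Nova's
    # real implementation lives in nova.virt.hardware and applies more rules.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Every sockets*cores*threads layout that uses exactly `vcpus` CPUs."""
        found = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            for c in range(1, min(max_cores, vcpus) + 1):
                for t in range(1, min(max_threads, vcpus) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- matches "Got 1 possible topologies"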
[ 989.156679] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Creating folder: Instances. Parent ref: group-v569972. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.156949] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b01c0bae-814f-4d0a-81aa-fa05b118c6d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.165350] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Created folder: Instances in parent group-v569972. [ 989.165584] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.165773] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.166066] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61ebd919-2d01-4a1d-942a-6d0b36602cc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.182626] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.182626] env[68638]: value = "task-2834039" [ 989.182626] env[68638]: _type = "Task" [ 989.182626] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.190895] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834039, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.242712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Releasing lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.243172] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance network_info: |[{"id": "4d549532-6530-40bb-95cf-fba098d9c5d8", "address": "fa:16:3e:23:b1:b3", "network": {"id": "e967a4ea-9ab0-4ed5-b14b-813f7a59308f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1273332228-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87e8ae27bc22417a82dce8d4fe1ca8e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d549532-65", "ovs_interfaceid": "4d549532-6530-40bb-95cf-fba098d9c5d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.243613] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:b1:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d549532-6530-40bb-95cf-fba098d9c5d8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.251745] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Creating folder: Project (87e8ae27bc22417a82dce8d4fe1ca8e3). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.252523] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4d0d5a3-89f2-4174-afba-3aec272433f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.263529] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Created folder: Project (87e8ae27bc22417a82dce8d4fe1ca8e3) in parent group-v569734. [ 989.264055] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Creating folder: Instances. Parent ref: group-v569975. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 989.264055] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cda4ad0-5887-4af2-b0ce-19ab5c614597 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.273729] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Created folder: Instances in parent group-v569975. [ 989.273729] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.273898] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.273987] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75ce945b-5280-411f-8fbe-1b914536261f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.292785] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.292785] env[68638]: value = "task-2834042" [ 989.292785] env[68638]: _type = "Task" [ 989.292785] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.300425] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834042, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.320356] env[68638]: INFO nova.compute.manager [-] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Took 1.34 seconds to deallocate network for instance. 
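Every *_Task invocation in this trace (CreateVM_Task, DeleteDatastoreFile_Task, and later CopyVirtualDisk_Task) is followed by the same wait_for_task / _poll_task pattern: the driver keeps re-reading the vSphere Task object and logging its progress ("progress is 0%") until it reaches success or error. The loop below is a minimal generic sketch of that contract; get_task_info is a hypothetical callback standing in for the PropertyCollector reads oslo.vmware actually performs.

    # Generic polling sketch; not oslo.vmware's implementation.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        """Block until a vSphere task succeeds; raise if it errors out."""
        while True:
            info = get_task_info(task_ref)  # expects .state, .progress, .error, .result
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise TaskFailed(info.error)
            # queued/running: report progress, like the "progress is 0%" lines
            print(f"Task {task_ref}: progress is {info.progress or 0}%")
            time.sleep(poll_interval)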
[ 989.321609] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.565342] env[68638]: DEBUG oslo_vmware.api [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.617254} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.565599] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.565830] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.566096] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.566295] env[68638]: INFO nova.compute.manager [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Took 3.34 seconds to destroy the instance on the hypervisor. [ 989.566572] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.566799] env[68638]: DEBUG nova.compute.manager [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.566905] env[68638]: DEBUG nova.network.neutron [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.592848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.555s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.593528] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 989.596654] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.056s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.600162] env[68638]: DEBUG nova.objects.instance [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 989.692585] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834039, 'name': CreateVM_Task, 'duration_secs': 0.348595} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.692751] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 989.693365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.693545] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.693859] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 989.694131] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4ecf268-e795-4538-9d4b-bbc4697fe834 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.698616] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 989.698616] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5206843a-b7a4-8cf7-9160-1eed8c1b9ff4" [ 989.698616] env[68638]: _type = "Task" [ 989.698616] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.706126] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5206843a-b7a4-8cf7-9160-1eed8c1b9ff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.803209] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834042, 'name': CreateVM_Task, 'duration_secs': 0.329715} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.803456] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 989.804488] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.833268] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.839017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "e7559933-fecc-4eb6-ba71-a295fba684e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.839465] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.107033] env[68638]: DEBUG nova.compute.utils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 990.111155] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 990.112007] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.154124] env[68638]: DEBUG nova.policy [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bab845b45bbb47c1b0b63c8398163ede', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e89fff19d6c461e8818d182dfd7d45e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.211159] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5206843a-b7a4-8cf7-9160-1eed8c1b9ff4, 'name': SearchDatastore_Task, 'duration_secs': 0.034664} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.211290] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.211587] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.211889] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.212123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.212369] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 
tempest-ServersAaction247Test-935830101-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 990.212696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.213115] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.213396] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6481ec3f-d1a9-484b-97da-35f3e4889431 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.215407] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ce75266-ee07-487b-a53f-c7f2c31ca858 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.221079] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 990.221079] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521475a7-92b1-ad33-7b51-839d14f42c99" [ 990.221079] env[68638]: _type = "Task" [ 990.221079] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.225402] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.225656] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.226899] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d93b9b2e-b078-43b4-87e9-8ce42586c748 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.232252] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521475a7-92b1-ad33-7b51-839d14f42c99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.235736] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 990.235736] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bf6f72-284e-4207-b36a-c9461d11ef9d" [ 990.235736] env[68638]: _type = "Task" [ 990.235736] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.243230] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bf6f72-284e-4207-b36a-c9461d11ef9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.342431] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 990.442492] env[68638]: DEBUG nova.network.neutron [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.480292] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Successfully created port: 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.612306] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 990.617986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7e0a2e17-ce86-4b84-abf2-967132dbaffd tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.617986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.378s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.617986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.618145] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 990.618518] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.025s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.618575] env[68638]: DEBUG nova.objects.instance [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lazy-loading 'resources' on Instance uuid 039edcf8-7908-4be4-8bd3-0b55545b6f7b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.620997] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808da982-eb71-4016-b5fb-a95695064814 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.632250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4ac637-9f66-407e-b74a-fdc446b096fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.646283] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58800248-5355-4ff9-bd89-e00c5898f4d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.653657] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dea7c1-008e-417c-866b-11a265c387f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.690215] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179352MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 990.690378] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.733116] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521475a7-92b1-ad33-7b51-839d14f42c99, 'name': SearchDatastore_Task, 'duration_secs': 0.014276} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.733426] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.733699] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.733838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.744129] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bf6f72-284e-4207-b36a-c9461d11ef9d, 'name': SearchDatastore_Task, 'duration_secs': 0.033508} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.744872] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a039b5-1fc0-46da-b209-99211aabad30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.750475] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 990.750475] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527d0234-4193-f263-e302-8d03084bfa9b" [ 990.750475] env[68638]: _type = "Task" [ 990.750475] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.760206] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d0234-4193-f263-e302-8d03084bfa9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.869402] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.912559] env[68638]: DEBUG nova.compute.manager [req-3fd7df57-e2f5-49e2-8582-562e40e10201 req-3f15166d-ed0a-478d-aa7a-2c5a8e56a1a2 service nova] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Received event network-vif-deleted-eb5d1f40-255a-48f5-aacc-0ca9cdafcfa4 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 990.945335] env[68638]: INFO nova.compute.manager [-] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Took 1.38 seconds to deallocate network for instance. [ 991.260033] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d0234-4193-f263-e302-8d03084bfa9b, 'name': SearchDatastore_Task, 'duration_secs': 0.030952} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.260275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.260537] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] fd329f9d-daf3-47ff-9c48-e1355fc012f4/fd329f9d-daf3-47ff-9c48-e1355fc012f4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.260811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.260992] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.261277] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebd8e727-88da-4813-8620-91088700fb48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.263215] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de40beb6-ca95-40f9-8499-aeef51234668 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.271350] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 991.271350] env[68638]: value = "task-2834043" [ 991.271350] env[68638]: _type = "Task" [ 991.271350] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.280448] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.280448] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.280448] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e025de4-b71f-495c-9f07-75034102d02e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.288265] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.295024] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 991.295024] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52266bfb-92ab-9338-43ff-5481116829b0" [ 991.295024] env[68638]: _type = "Task" [ 991.295024] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.302057] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52266bfb-92ab-9338-43ff-5481116829b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.508239] env[68638]: INFO nova.compute.manager [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Took 0.56 seconds to detach 1 volumes for instance. 
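The lock traffic around "[datastore1] devstack-image-cache_base/ef1ae417-…" shows oslo.concurrency serializing work on the shared image cache: each request takes the lock, checks whether the cached VMDK is already present, and only then copies it into its own instance directory (the CopyVirtualDisk_Task above). A minimal sketch of that pattern with lockutils, where download_image and copy_to_instance are placeholders for the real datastore operations:

    # Image-cache locking sketch, assuming oslo.concurrency is installed.
    import os
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(image_id, cache_dir, download_image, copy_to_instance):
        cached_vmdk = os.path.join(cache_dir, image_id, image_id + ".vmdk")
        # lockutils.lock emits the Acquiring/Acquired/Releasing DEBUG lines
        # seen above and keeps two requests from populating the same cache
        # entry at the same time.
        with lockutils.lock(cached_vmdk):
            if not os.path.exists(cached_vmdk):
                download_image(image_id, cached_vmdk)
        # The cache entry now exists; copy it into the instance folder
        # (the CopyVirtualDisk_Task step).
        copy_to_instance(cached_vmdk)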
[ 991.544846] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656d279d-2b35-4975-8e50-d308d78bf83c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.555708] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a796c3-1146-4c04-bc58-90b4a56c9c54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.595265] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4539a6c-eec1-4675-9605-763de85c2666 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.605366] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbe9ca9-0e91-4555-8d1b-a2942fbe70da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.622135] env[68638]: DEBUG nova.compute.provider_tree [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 991.626869] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.660108] env[68638]: DEBUG nova.virt.hardware [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.660108] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e209ea6f-25cd-4e42-b4f2-eea526cb7e6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.670828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50534894-161e-4880-979e-80eb454e8c73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.781702] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480006} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.782023] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] fd329f9d-daf3-47ff-9c48-e1355fc012f4/fd329f9d-daf3-47ff-9c48-e1355fc012f4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.782191] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.782445] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25620005-032c-4f2f-bdac-52ee71726b23 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.790541] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 991.790541] env[68638]: value = "task-2834044" [ 991.790541] env[68638]: _type = "Task" [ 991.790541] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.799387] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.805999] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52266bfb-92ab-9338-43ff-5481116829b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.806822] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaf78a2b-ad8d-44e2-92c8-040243c27e85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.811851] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 991.811851] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e3672d-e718-d582-6ea8-1bdea3a27503" [ 991.811851] env[68638]: _type = "Task" [ 991.811851] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.819940] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e3672d-e718-d582-6ea8-1bdea3a27503, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.019876] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.055420] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Successfully updated port: 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.145356] env[68638]: ERROR nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] [req-016835e4-f71a-4470-8bbc-163b6b3681bb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-016835e4-f71a-4470-8bbc-163b6b3681bb"}]} [ 992.166270] env[68638]: DEBUG nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 992.183894] env[68638]: DEBUG nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 992.183894] env[68638]: DEBUG nova.compute.provider_tree [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.195830] env[68638]: DEBUG nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 992.217044] env[68638]: DEBUG nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 992.302958] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069349} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.303211] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 992.304009] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83919899-8573-41d8-b579-e6d1c19553e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.328111] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] fd329f9d-daf3-47ff-9c48-e1355fc012f4/fd329f9d-daf3-47ff-9c48-e1355fc012f4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.335015] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f3126d0-14a3-4972-a15f-a9553b24cb2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.355136] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e3672d-e718-d582-6ea8-1bdea3a27503, 'name': SearchDatastore_Task, 'duration_secs': 0.008483} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.356343] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.356605] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f0598d8d-09a9-44ce-b4d7-cb8830a84b94/f0598d8d-09a9-44ce-b4d7-cb8830a84b94.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.357452] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 992.357452] env[68638]: value = "task-2834045" [ 992.357452] env[68638]: _type = "Task" [ 992.357452] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.359343] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d01def2e-4e11-4e3f-905a-18808e3a21f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.371866] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834045, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.376401] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 992.376401] env[68638]: value = "task-2834046" [ 992.376401] env[68638]: _type = "Task" [ 992.376401] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.391225] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.561833] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.562386] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.562386] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.627513] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.627747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.643250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca8ab17-d1bb-44a0-83e9-59369b6cdcc9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.651777] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8464617-df4b-484a-ae24-0a7f9fe550c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.685452] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08858908-90ac-4eaf-ab23-5a584e21ab80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.693283] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3dbe66-7847-4864-9d35-c796b5b97678 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.707967] env[68638]: DEBUG nova.compute.provider_tree [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.878311] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834045, 'name': ReconfigVM_Task, 'duration_secs': 0.473012} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.883994] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Reconfigured VM instance instance-00000058 to attach disk [datastore1] fd329f9d-daf3-47ff-9c48-e1355fc012f4/fd329f9d-daf3-47ff-9c48-e1355fc012f4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.886502] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ac52f31-74e6-4fee-a58a-513535eaa4a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.897990] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834046, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.900120] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 992.900120] env[68638]: value = "task-2834047" [ 992.900120] env[68638]: _type = "Task" [ 992.900120] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.916622] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834047, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.946227] env[68638]: DEBUG nova.compute.manager [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-vif-plugged-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 992.946383] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Acquiring lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.946615] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.946731] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.946921] env[68638]: DEBUG nova.compute.manager [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] No waiting events found dispatching network-vif-plugged-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 992.947124] env[68638]: WARNING nova.compute.manager [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received unexpected event network-vif-plugged-64e16852-058c-41a3-804c-d16bb756b439 for instance with vm_state building and task_state spawning. 
[ 992.947233] env[68638]: DEBUG nova.compute.manager [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-changed-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 992.947421] env[68638]: DEBUG nova.compute.manager [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing instance network info cache due to event network-changed-64e16852-058c-41a3-804c-d16bb756b439. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 992.947592] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.105324] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 993.132896] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 993.244752] env[68638]: DEBUG nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 993.244866] env[68638]: DEBUG nova.compute.provider_tree [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 120 to 121 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 993.245085] env[68638]: DEBUG nova.compute.provider_tree [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.281362] env[68638]: DEBUG nova.network.neutron [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.387432] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834046, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741768} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.387708] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f0598d8d-09a9-44ce-b4d7-cb8830a84b94/f0598d8d-09a9-44ce-b4d7-cb8830a84b94.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 993.387927] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 993.388234] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a30d31c-bf85-4d0d-9297-ebd76e2cfef6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.396254] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 993.396254] env[68638]: value = "task-2834048" [ 993.396254] env[68638]: _type = "Task" [ 993.396254] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.410409] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834047, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.414122] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.656763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.751064] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.132s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.753260] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.495s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.753720] env[68638]: DEBUG nova.objects.instance [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lazy-loading 'resources' on Instance uuid 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.771081] env[68638]: INFO nova.scheduler.client.report [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Deleted allocations for instance 039edcf8-7908-4be4-8bd3-0b55545b6f7b [ 993.783956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.784522] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Instance network_info: |[{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", 
"external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 993.785019] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.785324] env[68638]: DEBUG nova.network.neutron [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing network info cache for port 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 993.786891] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:c3:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64e16852-058c-41a3-804c-d16bb756b439', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.796170] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Creating folder: Project (4e89fff19d6c461e8818d182dfd7d45e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 993.797251] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fab94630-ae11-4666-a8ae-f2f8060ae30c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.811538] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Created folder: Project (4e89fff19d6c461e8818d182dfd7d45e) in parent group-v569734. [ 993.811735] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Creating folder: Instances. Parent ref: group-v569978. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 993.811972] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2868c60-bfbb-4ea3-b455-b2788f4b72f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.822594] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Created folder: Instances in parent group-v569978. [ 993.822869] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.823066] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 993.823285] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d9ed19e-378c-43d9-9e4c-be03bc6933b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.843255] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.843255] env[68638]: value = "task-2834051" [ 993.843255] env[68638]: _type = "Task" [ 993.843255] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.851201] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834051, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.909092] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158937} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.912212] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.912572] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834047, 'name': Rename_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.913325] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d2c44d-3688-4e68-a145-8f5ea7e8fe4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.937575] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] f0598d8d-09a9-44ce-b4d7-cb8830a84b94/f0598d8d-09a9-44ce-b4d7-cb8830a84b94.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.937882] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e30a38a4-820b-4ed9-b198-df3d259a00e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.957713] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 993.957713] env[68638]: value = "task-2834052" [ 993.957713] env[68638]: _type = "Task" [ 993.957713] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.969499] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834052, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.280821] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49c799da-af89-448d-a255-2a2afddcef36 tempest-ServersTestBootFromVolume-1957324576 tempest-ServersTestBootFromVolume-1957324576-project-member] Lock "039edcf8-7908-4be4-8bd3-0b55545b6f7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.306s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.353549] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834051, 'name': CreateVM_Task, 'duration_secs': 0.400409} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.353738] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 994.354424] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.354597] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.354906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 994.355209] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3644b20-aaf8-45b0-b030-42a95697674b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.359623] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 994.359623] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523dce9d-356f-b897-6c4c-f6f2ab8a2995" [ 994.359623] env[68638]: _type = "Task" [ 994.359623] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.371580] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523dce9d-356f-b897-6c4c-f6f2ab8a2995, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.412208] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834047, 'name': Rename_Task, 'duration_secs': 1.233731} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.412532] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.412760] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b4e31f4-e316-4632-8f4f-805518ff433f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.424362] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 994.424362] env[68638]: value = "task-2834053" [ 994.424362] env[68638]: _type = "Task" [ 994.424362] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.436601] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834053, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.472149] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834052, 'name': ReconfigVM_Task, 'duration_secs': 0.347584} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.472724] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Reconfigured VM instance instance-00000057 to attach disk [datastore1] f0598d8d-09a9-44ce-b4d7-cb8830a84b94/f0598d8d-09a9-44ce-b4d7-cb8830a84b94.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.473716] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5c13c8f-2205-4358-b4bc-433514c25a6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.480054] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 994.480054] env[68638]: value = "task-2834054" [ 994.480054] env[68638]: _type = "Task" [ 994.480054] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.491704] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834054, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.597908] env[68638]: DEBUG nova.network.neutron [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updated VIF entry in instance network info cache for port 64e16852-058c-41a3-804c-d16bb756b439. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 994.597908] env[68638]: DEBUG nova.network.neutron [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.649800] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea8eea5-a1db-4ff4-a705-eb09c222eccd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.658837] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa6255b-275c-4f46-8135-659aa2f5e063 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.691385] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c359a6f-a6d1-4306-a0e9-7249a76b7bf0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.699703] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb3c956-586a-45ef-b58b-f50e17c30e89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.714080] env[68638]: DEBUG nova.compute.provider_tree [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.871691] env[68638]: DEBUG oslo_vmware.api [None 
req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523dce9d-356f-b897-6c4c-f6f2ab8a2995, 'name': SearchDatastore_Task, 'duration_secs': 0.029615} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.871867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.871971] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.872233] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.872377] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.872557] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.872831] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbf8f895-4bf8-43bf-a2c8-6d6ee1bb7856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.882958] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.883221] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.883909] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7fc1d34-c365-4431-a3cb-319c3e2cd9cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.889527] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 994.889527] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524718cc-1bab-2097-c46d-2ca1ad74ed0d" [ 994.889527] env[68638]: _type = "Task" [ 994.889527] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.900055] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524718cc-1bab-2097-c46d-2ca1ad74ed0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.936252] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834053, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.990020] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834054, 'name': Rename_Task, 'duration_secs': 0.195472} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.990348] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.990595] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5adbd2b-10b5-42a8-902e-62e164183bb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.996990] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 994.996990] env[68638]: value = "task-2834055" [ 994.996990] env[68638]: _type = "Task" [ 994.996990] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.005131] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834055, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.100098] env[68638]: DEBUG oslo_concurrency.lockutils [req-d9b8be24-5709-40c1-b35f-69e5ea7b27b1 req-91113a9d-752b-4fba-a352-155d0210c053 service nova] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.218484] env[68638]: DEBUG nova.scheduler.client.report [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.399824] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524718cc-1bab-2097-c46d-2ca1ad74ed0d, 'name': SearchDatastore_Task, 'duration_secs': 0.011613} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.400810] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ff6e9c4-c21e-4d84-ab3d-54bc9df1aa67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.406225] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 995.406225] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52441f72-e60f-0c50-2d9c-6e15d28e6b31" [ 995.406225] env[68638]: _type = "Task" [ 995.406225] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.415012] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52441f72-e60f-0c50-2d9c-6e15d28e6b31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.436431] env[68638]: DEBUG oslo_vmware.api [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834053, 'name': PowerOnVM_Task, 'duration_secs': 0.685241} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.437015] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.437398] env[68638]: INFO nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Took 6.36 seconds to spawn the instance on the hypervisor. [ 995.437689] env[68638]: DEBUG nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.438909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3dd8f2b-6029-4b54-b367-7056034d83f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.509631] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834055, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.725689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.731027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.488s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.731027] env[68638]: DEBUG nova.objects.instance [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lazy-loading 'resources' on Instance uuid 30193a76-a391-4a64-98cc-7e22dcf7218c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.753572] env[68638]: INFO nova.scheduler.client.report [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted allocations for instance 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e [ 995.920738] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52441f72-e60f-0c50-2d9c-6e15d28e6b31, 'name': SearchDatastore_Task, 'duration_secs': 0.01383} completed 
successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.920910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.921257] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] da886efd-bca9-45aa-abcc-13832c66a90c/da886efd-bca9-45aa-abcc-13832c66a90c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.921589] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4f287fb-8b67-4520-896f-fde1682dbdc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.928408] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 995.928408] env[68638]: value = "task-2834056" [ 995.928408] env[68638]: _type = "Task" [ 995.928408] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.937357] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.964426] env[68638]: INFO nova.compute.manager [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Took 38.39 seconds to build instance. [ 996.007694] env[68638]: DEBUG oslo_vmware.api [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834055, 'name': PowerOnVM_Task, 'duration_secs': 0.556505} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.007945] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.008690] env[68638]: INFO nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Took 10.46 seconds to spawn the instance on the hypervisor. [ 996.008901] env[68638]: DEBUG nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.009707] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa96aee-aa07-4e97-a0a6-708b9fae20b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.265121] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ae2e52d1-95e8-40ab-80bf-300360b233be tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "9c0d1c2d-88ea-40be-aef1-43b37b4dca3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.679s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.438352] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834056, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498268} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.438644] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] da886efd-bca9-45aa-abcc-13832c66a90c/da886efd-bca9-45aa-abcc-13832c66a90c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.438856] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.439134] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c5eed4a-9cb5-46d0-b066-a1e804ba89d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.445832] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 996.445832] env[68638]: value = "task-2834057" [ 996.445832] env[68638]: _type = "Task" [ 996.445832] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.455718] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834057, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.466624] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2be844d2-7426-409c-81b2-eca1defb5007 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.902s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.526192] env[68638]: INFO nova.compute.manager [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Took 39.58 seconds to build instance. 
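[editor's note] The entries above trace the guest build path end to end (SearchDatastore_Task against the image cache, CopyVirtualDisk_Task into the instance directory, ExtendVirtualDisk_Task, ReconfigVM_Task to attach the disk, Rename_Task, PowerOnVM_Task), and each step follows the same oslo.vmware polling pattern: the SOAP call returns a task handle, "Waiting for the task" is logged, _poll_task reports "progress is N%" until the task finishes, and the completion line carries duration_secs. The sketch below is a deliberately simplified, hypothetical rendering of that loop; it is not the oslo_vmware.api implementation, and the poll_fn callable and TaskInfo fields are assumptions made only for illustration.

    # Hypothetical, simplified sketch of the task-polling loop reflected in the log
    # (illustrative only; not the real oslo_vmware.api code).
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # assumed terminal states: "running" | "success" | "error"
        progress: int = 0
        error: str = ""

    def wait_for_task(poll_fn, interval=0.5):
        """Poll a vCenter-style task until it finishes, mirroring the log lines
        "Waiting for the task", "progress is N%", and "completed successfully"."""
        start = time.monotonic()
        print("Waiting for the task to complete.")
        while True:
            info = poll_fn()                      # one property-collector round trip
            if info.state == "success":
                print(f"completed successfully, duration_secs={time.monotonic() - start:.6f}")
                return info
            if info.state == "error":
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%")
            time.sleep(interval)

Read this way, each "progress is N%" line above corresponds to one poll iteration, and the final "completed successfully ... duration_secs" line is the terminal poll for that task.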
[ 996.599123] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80e8d52-4385-4643-9f53-640c74a63823 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.607878] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ff17d8-78bd-42a1-8516-24acb762b26c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.638247] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1997d04-1c4e-477d-adc0-9d9524084ad8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.642680] env[68638]: DEBUG nova.compute.manager [None req-722b8f25-0b8b-4016-8bca-26bc9d2b4528 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.645442] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316eda15-0413-42fd-8a76-b961f0a1f4b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.651307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cd576c-554b-4eb9-9242-73baa561c86e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.668402] env[68638]: DEBUG nova.compute.provider_tree [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.716589] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.716836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.717143] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock 
"fd329f9d-daf3-47ff-9c48-e1355fc012f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.717547] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.717767] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.720132] env[68638]: INFO nova.compute.manager [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Terminating instance [ 996.960063] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834057, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08076} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.960063] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.960063] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a962b013-861e-475f-a2fc-92b788166a62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.985383] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] da886efd-bca9-45aa-abcc-13832c66a90c/da886efd-bca9-45aa-abcc-13832c66a90c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.985893] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3922333-59ba-43ac-b3ca-6ad0e999e7c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.010661] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 
997.010661] env[68638]: value = "task-2834058" [ 997.010661] env[68638]: _type = "Task" [ 997.010661] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.019200] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834058, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.030991] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f460620-83fe-4884-8500-f4677646bde0 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.101s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.171754] env[68638]: INFO nova.compute.manager [None req-722b8f25-0b8b-4016-8bca-26bc9d2b4528 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] instance snapshotting [ 997.171754] env[68638]: DEBUG nova.objects.instance [None req-722b8f25-0b8b-4016-8bca-26bc9d2b4528 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lazy-loading 'flavor' on Instance uuid fd329f9d-daf3-47ff-9c48-e1355fc012f4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.178032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.178032] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.178308] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.178638] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.179361] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.182700] env[68638]: INFO nova.compute.manager [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Terminating instance [ 997.216528] env[68638]: DEBUG nova.scheduler.client.report [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 997.216877] env[68638]: DEBUG nova.compute.provider_tree [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 121 to 122 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 997.217174] env[68638]: DEBUG nova.compute.provider_tree [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.224731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "refresh_cache-fd329f9d-daf3-47ff-9c48-e1355fc012f4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.224903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquired lock "refresh_cache-fd329f9d-daf3-47ff-9c48-e1355fc012f4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.225804] env[68638]: DEBUG nova.network.neutron [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 
tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.289181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.289441] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.289672] env[68638]: INFO nova.compute.manager [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Rebooting instance [ 997.523989] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834058, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.685817] env[68638]: DEBUG nova.compute.manager [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 997.686097] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.687232] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53cbc84-5047-4b3c-acca-d21632dc6b9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.690426] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cdb351-39a7-4554-8f4a-e12783915511 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.708746] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1e40b2-d594-4c68-9946-178ef84d5858 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.711728] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 997.711728] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30a2a742-7f15-46a6-905d-fa5beeb65051 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.723548] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.725721] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 997.725721] env[68638]: value = "task-2834059" [ 997.725721] env[68638]: _type = "Task" [ 997.725721] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.726462] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.596s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.726683] env[68638]: DEBUG nova.objects.instance [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lazy-loading 'resources' on Instance uuid ee752ace-fa19-4fd7-af89-f6628ce3d087 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.739730] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2834059, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.755625] env[68638]: DEBUG nova.network.neutron [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.758546] env[68638]: INFO nova.scheduler.client.report [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Deleted allocations for instance 30193a76-a391-4a64-98cc-7e22dcf7218c [ 997.811936] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.812153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquired lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.812336] env[68638]: DEBUG nova.network.neutron [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.817134] env[68638]: DEBUG nova.network.neutron [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.026547] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 
tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834058, 'name': ReconfigVM_Task, 'duration_secs': 0.788859} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.026829] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfigured VM instance instance-00000059 to attach disk [datastore2] da886efd-bca9-45aa-abcc-13832c66a90c/da886efd-bca9-45aa-abcc-13832c66a90c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.027481] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7c53f82-4246-4fc0-8095-6fb1e26e7ffa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.033584] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 998.033584] env[68638]: value = "task-2834060" [ 998.033584] env[68638]: _type = "Task" [ 998.033584] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.041900] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834060, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.228823] env[68638]: DEBUG nova.compute.manager [None req-722b8f25-0b8b-4016-8bca-26bc9d2b4528 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance disappeared during snapshot {{(pid=68638) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 998.252880] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2834059, 'name': PowerOffVM_Task, 'duration_secs': 0.286802} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.254528] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.254718] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.255019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7ff9fe8-ee23-47c7-a079-cc1ac8194cc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.277680] env[68638]: DEBUG oslo_concurrency.lockutils [None req-328f629f-d47f-4efe-9b5e-ec3ebde0a8d2 tempest-ServersTestJSON-1549970154 tempest-ServersTestJSON-1549970154-project-member] Lock "30193a76-a391-4a64-98cc-7e22dcf7218c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.910s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.319433] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Releasing lock "refresh_cache-fd329f9d-daf3-47ff-9c48-e1355fc012f4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.319940] env[68638]: DEBUG nova.compute.manager [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.320680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.322292] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5990db2e-5013-4c4a-bbb7-0c843c69d35f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.334268] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.335248] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.335248] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleting the datastore file [datastore1] 4edaaa5d-535a-4c63-ab44-724548a0f3eb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.335248] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.335468] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bb77609-7964-414c-bafa-9340b7fef491 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.337253] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f25fe666-21f8-4995-8d4d-3a19df20ae22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.345494] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 998.345494] env[68638]: value = "task-2834062" [ 998.345494] env[68638]: _type = "Task" [ 998.345494] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.346088] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for the task: (returnval){ [ 998.346088] env[68638]: value = "task-2834063" [ 998.346088] env[68638]: _type = "Task" [ 998.346088] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.414761] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834062, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.414761] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2834063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.426394] env[68638]: DEBUG nova.compute.manager [None req-722b8f25-0b8b-4016-8bca-26bc9d2b4528 tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Found 0 images (rotation: 2) {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 998.547292] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834060, 'name': Rename_Task, 'duration_secs': 0.166388} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.550171] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 998.550695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-451fc7f7-92dc-4096-abe7-0fb25ab94a02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.558592] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 998.558592] env[68638]: value = "task-2834064" [ 998.558592] env[68638]: _type = "Task" [ 998.558592] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.575344] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834064, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.720137] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78915434-eab1-496e-b795-62fe1deb3aca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.729851] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03f5acb-c8b8-4893-85da-d35aaa3ae8a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.733344] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.733718] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.765093] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a597010e-6fb1-450a-94b7-bc90bfeed6e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.769980] env[68638]: DEBUG nova.network.neutron [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Updating instance_info_cache with network_info: [{"id": "4d549532-6530-40bb-95cf-fba098d9c5d8", "address": "fa:16:3e:23:b1:b3", "network": {"id": "e967a4ea-9ab0-4ed5-b14b-813f7a59308f", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1273332228-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87e8ae27bc22417a82dce8d4fe1ca8e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d549532-65", "ovs_interfaceid": "4d549532-6530-40bb-95cf-fba098d9c5d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.776672] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc6cfb4-ab2e-4a20-9882-41f0e3877bb6 
{{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.796020] env[68638]: DEBUG nova.compute.provider_tree [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.856248] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834062, 'name': PowerOffVM_Task, 'duration_secs': 0.211662} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.859487] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.859775] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.859883] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5366097f-59df-4a50-9fc1-2c51b46bc5bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.866304] env[68638]: DEBUG oslo_vmware.api [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Task: {'id': task-2834063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165092} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.866612] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.867576] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.867576] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.867576] env[68638]: INFO nova.compute.manager [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Took 1.18 seconds to destroy the instance on the hypervisor. [ 998.868325] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.868325] env[68638]: DEBUG nova.compute.manager [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.868325] env[68638]: DEBUG nova.network.neutron [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.883795] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.884039] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.884227] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Deleting the datastore file [datastore1] fd329f9d-daf3-47ff-9c48-e1355fc012f4 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.884539] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d143f3ed-2506-4e49-b936-ec2c2ab850d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.890808] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for the task: (returnval){ [ 998.890808] env[68638]: value = "task-2834066" [ 998.890808] env[68638]: _type = "Task" [ 998.890808] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.899391] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834066, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.068687] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834064, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.237177] env[68638]: INFO nova.compute.manager [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Detaching volume 3242773b-24c0-4e87-8db6-f2d6f9823068 [ 999.272689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Releasing lock "refresh_cache-f0598d8d-09a9-44ce-b4d7-cb8830a84b94" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.297149] env[68638]: INFO nova.virt.block_device [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Attempting to driver detach volume 3242773b-24c0-4e87-8db6-f2d6f9823068 from mountpoint /dev/sdb [ 999.297477] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 999.297802] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 999.298875] env[68638]: DEBUG nova.scheduler.client.report [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.307328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf1e435-f6b9-4364-9cac-2ce9ea096e74 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.335029] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-669ac7ec-f475-4360-aa4d-990b382aadb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.344875] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3cad95-70cd-45b2-aac9-4d1dedfda960 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.354947] env[68638]: DEBUG nova.compute.manager [req-4e0ea262-3a02-4814-939f-7394ac73a00e req-22bdbc3d-68ca-4e53-916b-5f134b394b8e service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Received event network-vif-deleted-a9fa307a-55b9-4398-b9a3-75870a0519ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 999.354947] env[68638]: INFO nova.compute.manager [req-4e0ea262-3a02-4814-939f-7394ac73a00e req-22bdbc3d-68ca-4e53-916b-5f134b394b8e service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Neutron deleted interface a9fa307a-55b9-4398-b9a3-75870a0519ca; detaching it from the instance and deleting it from the info cache [ 999.354947] env[68638]: DEBUG nova.network.neutron [req-4e0ea262-3a02-4814-939f-7394ac73a00e req-22bdbc3d-68ca-4e53-916b-5f134b394b8e service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.377262] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f58deb-7a10-4978-9ea4-9c7a20185453 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.397168] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] The volume has not been displaced from its original location: [datastore2] volume-3242773b-24c0-4e87-8db6-f2d6f9823068/volume-3242773b-24c0-4e87-8db6-f2d6f9823068.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 999.400368] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfiguring VM instance instance-00000040 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 999.403830] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-061deee1-5976-48a7-bfbb-4a15b856c2f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.426432] env[68638]: DEBUG oslo_vmware.api [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Task: {'id': task-2834066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17781} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.428184] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.428184] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.428637] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.428685] env[68638]: INFO nova.compute.manager [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 999.429501] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.429649] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 999.429649] env[68638]: value = "task-2834067" [ 999.429649] env[68638]: _type = "Task" [ 999.429649] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.429920] env[68638]: DEBUG nova.compute.manager [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.429985] env[68638]: DEBUG nova.network.neutron [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 999.445826] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834067, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.462816] env[68638]: DEBUG nova.network.neutron [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.570012] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834064, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.779202] env[68638]: DEBUG nova.compute.manager [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.780850] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d944726-6f9b-462a-ad4c-e851ca165e32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.807619] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.810411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.610s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.810648] env[68638]: DEBUG nova.objects.instance [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lazy-loading 'resources' on Instance uuid 61b9bce5-6a3e-4149-a759-d08e2e2301ee {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.813671] env[68638]: DEBUG nova.network.neutron [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.848267] env[68638]: INFO nova.scheduler.client.report [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Deleted allocations for instance ee752ace-fa19-4fd7-af89-f6628ce3d087 [ 999.858222] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f5c15cd-b84d-4f2a-a23f-b858c0186dbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.873030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f755d029-d1df-4771-8902-e919f2e4eb85 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.912833] env[68638]: DEBUG nova.compute.manager [req-4e0ea262-3a02-4814-939f-7394ac73a00e req-22bdbc3d-68ca-4e53-916b-5f134b394b8e service nova] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Detach interface failed, 
port_id=a9fa307a-55b9-4398-b9a3-75870a0519ca, reason: Instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 999.946562] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834067, 'name': ReconfigVM_Task, 'duration_secs': 0.345652} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.948296] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Reconfigured VM instance instance-00000040 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 999.953201] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62408273-34b2-4fec-b88b-7eaf368d8c9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.965403] env[68638]: DEBUG nova.network.neutron [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.969353] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 999.969353] env[68638]: value = "task-2834068" [ 999.969353] env[68638]: _type = "Task" [ 999.969353] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.978034] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834068, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.073963] env[68638]: DEBUG oslo_vmware.api [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834064, 'name': PowerOnVM_Task, 'duration_secs': 1.026848} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.074435] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.074689] env[68638]: INFO nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Took 8.45 seconds to spawn the instance on the hypervisor. 
[ 1000.074881] env[68638]: DEBUG nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.078842] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b955780-7e06-4813-9ab2-86d3494d2cfe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.316096] env[68638]: INFO nova.compute.manager [-] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Took 1.45 seconds to deallocate network for instance. [ 1000.360354] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dc51ce38-337a-4f5f-91f8-7c5e10f16b58 tempest-ServerRescueTestJSON-1253435358 tempest-ServerRescueTestJSON-1253435358-project-member] Lock "ee752ace-fa19-4fd7-af89-f6628ce3d087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.785s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.470357] env[68638]: INFO nova.compute.manager [-] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Took 1.04 seconds to deallocate network for instance. [ 1000.486535] env[68638]: DEBUG oslo_vmware.api [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834068, 'name': ReconfigVM_Task, 'duration_secs': 0.142663} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.486922] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569954', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'name': 'volume-3242773b-24c0-4e87-8db6-f2d6f9823068', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14c1dba5-98cb-4ebd-8e76-60b3f74cca4b', 'attached_at': '', 'detached_at': '', 'volume_id': '3242773b-24c0-4e87-8db6-f2d6f9823068', 'serial': '3242773b-24c0-4e87-8db6-f2d6f9823068'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1000.602157] env[68638]: INFO nova.compute.manager [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Took 41.45 seconds to build instance. 
[ 1000.726195] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779f9814-879c-499a-b6d9-e6587f8044dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.736457] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a13e94-351e-408d-aded-7924654781e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.773648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.774021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.776633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda937da-8ccf-4160-9d27-d5b8f7550f0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.784370] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca59292b-2be4-4ef5-8a18-66e41099cc74 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.801085] env[68638]: DEBUG nova.compute.provider_tree [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.802633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca12f252-986f-4b35-878d-f3c588cb35a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.809899] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Doing hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1000.810743] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-035d427e-311b-4ab8-b542-eac85929be54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.817176] env[68638]: DEBUG oslo_vmware.api [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 1000.817176] env[68638]: 
value = "task-2834069" [ 1000.817176] env[68638]: _type = "Task" [ 1000.817176] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.821981] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.825735] env[68638]: DEBUG oslo_vmware.api [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834069, 'name': ResetVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.982309] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.058581] env[68638]: DEBUG nova.objects.instance [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'flavor' on Instance uuid 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.103761] env[68638]: DEBUG oslo_concurrency.lockutils [None req-72fb8c12-6e54-420b-9ced-6e37ed5dae61 tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.960s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.278442] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1001.309021] env[68638]: DEBUG nova.scheduler.client.report [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.329215] env[68638]: DEBUG oslo_vmware.api [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834069, 'name': ResetVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.636573] env[68638]: DEBUG nova.compute.manager [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-changed-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1001.636788] env[68638]: DEBUG nova.compute.manager [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing instance network info cache due to event network-changed-64e16852-058c-41a3-804c-d16bb756b439. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1001.637619] env[68638]: DEBUG oslo_concurrency.lockutils [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.637941] env[68638]: DEBUG oslo_concurrency.lockutils [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.638159] env[68638]: DEBUG nova.network.neutron [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing network info cache for port 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.803889] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.815245] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.005s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.823741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.042s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.824096] env[68638]: INFO nova.compute.claims [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.841387] env[68638]: DEBUG oslo_vmware.api [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834069, 'name': ResetVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.855812] env[68638]: INFO nova.scheduler.client.report [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted allocations for instance 61b9bce5-6a3e-4149-a759-d08e2e2301ee [ 1002.072384] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978e7575-e462-4aac-814d-c975901ed87c tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.338s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.208235] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.208520] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.304220] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.304583] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.304974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.305224] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.305478] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.309932] env[68638]: INFO nova.compute.manager [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Terminating instance [ 1002.346518] env[68638]: DEBUG oslo_vmware.api [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834069, 'name': ResetVM_Task, 'duration_secs': 1.114693} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.350278] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Did hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1002.350278] env[68638]: DEBUG nova.compute.manager [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.350278] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87356af2-151e-408a-a210-014514915d4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.365900] env[68638]: DEBUG oslo_concurrency.lockutils [None req-323dab92-3da4-4b8c-87d1-e9f92cac3236 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "61b9bce5-6a3e-4149-a759-d08e2e2301ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.908s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.393885] env[68638]: DEBUG nova.network.neutron [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updated VIF entry in instance network info cache for port 64e16852-058c-41a3-804c-d16bb756b439. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1002.394329] env[68638]: DEBUG nova.network.neutron [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.715589] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.818631] env[68638]: DEBUG nova.compute.manager [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1002.818874] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1002.820253] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29764655-d38b-4ac0-929a-d02ae43379fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.828177] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.829109] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bca3a3fc-abe4-4857-8d4f-f85f0695c8b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.835817] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1002.835817] env[68638]: value = "task-2834070" [ 1002.835817] env[68638]: _type = "Task" [ 1002.835817] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.846830] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834070, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.891603] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58f7c55e-5d24-4689-9896-50dee2892288 tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.591s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.906060] env[68638]: DEBUG oslo_concurrency.lockutils [req-487b0133-defd-4195-9e03-6aae1605bc0c req-1892095a-2016-406a-af1c-235f4b3c4c3b service nova] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.959506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "9975e756-b571-4e70-ba50-a6001d0b064c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.959506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.959506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.959506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.959506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.962701] env[68638]: INFO nova.compute.manager [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Terminating instance [ 1003.237413] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 
tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.273992] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc3ec48-cfb6-4244-a99a-3ff1c02af7f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.283062] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a514611-b79f-441d-836a-ac23d17ad736 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.314758] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c813747-db09-488b-b53c-f7629fb495af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.324019] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06d0af7-6e9a-4ceb-aac4-67c02e5f6cdb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.335841] env[68638]: DEBUG nova.compute.provider_tree [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.344974] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834070, 'name': PowerOffVM_Task, 'duration_secs': 0.422457} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.348190] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.348190] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.348190] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71a0e698-fa04-4757-8b9a-4d23759538b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.423117] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1003.423722] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1003.424016] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore1] 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.424308] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cba79fe-16e9-469c-84ad-12333b61310a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.431782] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1003.431782] env[68638]: value = "task-2834072" [ 1003.431782] env[68638]: _type = "Task" [ 1003.431782] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.440050] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.468092] env[68638]: DEBUG nova.compute.manager [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1003.468298] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.469183] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b7425a-4c53-40fd-ba7b-6651d2b04e1f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.476317] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.476555] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33744451-9fee-414c-8214-3683acd4bb0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.482301] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1003.482301] env[68638]: value = "task-2834073" [ 1003.482301] env[68638]: _type = "Task" [ 1003.482301] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.490402] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834073, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.521062] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.521286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.609049] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.609351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.609571] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.609752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.611169] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.613886] env[68638]: INFO nova.compute.manager [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: 
f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Terminating instance [ 1003.647028] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.647263] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.841407] env[68638]: DEBUG nova.scheduler.client.report [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.942032] env[68638]: DEBUG oslo_vmware.api [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.443786} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.942303] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.942569] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.942664] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.942837] env[68638]: INFO nova.compute.manager [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1003.943123] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.943348] env[68638]: DEBUG nova.compute.manager [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1003.943445] env[68638]: DEBUG nova.network.neutron [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.991562] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834073, 'name': PowerOffVM_Task, 'duration_secs': 0.318413} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.991914] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.991986] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1003.992254] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48a3e186-bbd4-407e-9fa3-c8d5da081367 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.023517] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1004.061026] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.061282] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.061514] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore1] 9975e756-b571-4e70-ba50-a6001d0b064c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.062505] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c32fa10f-aeff-4044-ae9e-03ce155873a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.070229] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1004.070229] env[68638]: value = "task-2834075" [ 1004.070229] env[68638]: _type = "Task" [ 1004.070229] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.079432] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.118129] env[68638]: DEBUG nova.compute.manager [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1004.118396] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.119276] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dba78e5-bc75-4402-bee4-0f72de554f1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.128080] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.128330] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d222a5c-b399-48dc-9d50-da9e1de07fc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.135201] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 1004.135201] env[68638]: value = "task-2834076" [ 1004.135201] env[68638]: _type = "Task" [ 1004.135201] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.145400] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834076, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.149522] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1004.346871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.524s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.348030] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1004.352182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.030s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.355037] env[68638]: INFO nova.compute.claims [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.562583] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.569884] env[68638]: DEBUG nova.compute.manager [req-c66d4ec8-8d16-4dd6-bf91-d7541a3dabcb req-2adc8661-6d6b-409a-9cd7-d1ad584c1740 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Received event network-vif-deleted-05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1004.570868] env[68638]: INFO nova.compute.manager [req-c66d4ec8-8d16-4dd6-bf91-d7541a3dabcb req-2adc8661-6d6b-409a-9cd7-d1ad584c1740 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Neutron deleted interface 05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0; detaching it from the instance and deleting it from the info cache [ 1004.570868] env[68638]: DEBUG nova.network.neutron [req-c66d4ec8-8d16-4dd6-bf91-d7541a3dabcb req-2adc8661-6d6b-409a-9cd7-d1ad584c1740 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.581393] env[68638]: DEBUG oslo_vmware.api [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287961} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.581642] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.581824] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.582014] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.582187] env[68638]: INFO nova.compute.manager [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1004.582427] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.583089] env[68638]: DEBUG nova.compute.manager [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1004.583191] env[68638]: DEBUG nova.network.neutron [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1004.645093] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834076, 'name': PowerOffVM_Task, 'duration_secs': 0.232381} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.645388] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.645569] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.645822] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa1b56ee-9771-47be-a81d-09da6eb5af7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.673082] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.859785] env[68638]: DEBUG nova.compute.utils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1004.864511] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1004.865113] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1004.866798] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.867052] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.867215] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Deleting the datastore file [datastore1] f0598d8d-09a9-44ce-b4d7-cb8830a84b94 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.868070] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d9a4abb-cc27-4bf5-868c-c0f53b83a9f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.874872] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for the task: (returnval){ [ 1004.874872] env[68638]: value = "task-2834078" [ 1004.874872] env[68638]: _type = "Task" [ 1004.874872] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.887107] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.906123] env[68638]: DEBUG nova.policy [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c7d60e31234230b817fc1778234251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e7777e8e5d342d68e2f54e23d125314', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1005.060907] env[68638]: DEBUG nova.network.neutron [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.073848] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e63726e-7832-4406-86fd-23d11a333d27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.083018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc1e440-5158-4b65-855c-d8d805ed93ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.126831] env[68638]: DEBUG nova.compute.manager [req-c66d4ec8-8d16-4dd6-bf91-d7541a3dabcb req-2adc8661-6d6b-409a-9cd7-d1ad584c1740 service nova] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Detach interface failed, port_id=05aa9a68-9c62-4f2d-a54d-2a9d35e3e3f0, reason: Instance 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1005.195449] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Successfully created port: 53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1005.295219] env[68638]: DEBUG nova.network.neutron [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.371845] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1005.386451] env[68638]: DEBUG oslo_vmware.api [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Task: {'id': task-2834078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138993} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.386731] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1005.386920] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1005.387106] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1005.387285] env[68638]: INFO nova.compute.manager [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1005.387521] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1005.387711] env[68638]: DEBUG nova.compute.manager [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1005.387825] env[68638]: DEBUG nova.network.neutron [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1005.563793] env[68638]: INFO nova.compute.manager [-] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Took 1.62 seconds to deallocate network for instance. 
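Once the hypervisor-side destroy finishes, each instance moves into "Deallocating network for instance", driven by an oslo.service looping call that waits for _deallocate_network_with_retries to return. The sketch below reproduces that retry idiom with oslo_service.loopingcall; it is not Nova's helper itself (the callable, attempt limit and interval are illustrative), only the FixedIntervalLoopingCall / LoopingCallDone pattern that the "Waiting for function ... to return" lines point at.

```python
from oslo_service import loopingcall

# Hedged sketch: retry a network deallocation until it succeeds, using an
# oslo.service looping call.  `deallocate`, `context`, `instance`,
# `max_attempts` and `interval` are illustrative stand-ins.

def deallocate_network_with_retries(deallocate, context, instance,
                                    max_attempts=3, interval=1.0):
    state = {'attempts': 0}

    def _try_once():
        state['attempts'] += 1
        try:
            deallocate(context, instance)
        except Exception:
            if state['attempts'] >= max_attempts:
                raise              # give up; the error surfaces in wait()
            return                 # run again after `interval` seconds
        raise loopingcall.LoopingCallDone()   # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=interval).wait()
```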
[ 1005.601742] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.602017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.602242] env[68638]: INFO nova.compute.manager [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Shelving [ 1005.770758] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2633941-2f49-4478-9958-a0114ab3ffc3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.778356] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d01edc-a114-4bef-8334-7a290345ff0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.807010] env[68638]: INFO nova.compute.manager [-] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Took 1.22 seconds to deallocate network for instance. 
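Almost every operation in this section is bracketed by oslo.concurrency lock messages: a per-instance lock around build/terminate/shelve, a "<uuid>-events" lock around event bookkeeping, and a "refresh_cache-<uuid>" lock around network-info-cache refreshes. A minimal sketch of those idioms with lockutils is shown below; the lock names mirror the ones in the log, but the function bodies are placeholders, not Nova code.

```python
from oslo_concurrency import lockutils

# Hedged sketch of the locking idioms behind the "Acquiring lock ..." /
# "Lock ... acquired by ..." entries above.

def do_shelve_instance(instance_uuid):
    # One build/terminate/shelve operation per instance at a time.
    with lockutils.lock(instance_uuid):
        pass  # power off, snapshot, upload the shelved image, ...

def refresh_network_cache(instance_uuid):
    # Taken while rebuilding the instance's network info cache, matching
    # the "refresh_cache-<uuid>" acquire/release pairs in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # query Neutron, update instance_info_cache
```

The "waited N.NNNs" / "held N.NNNs" figures in each acquired/released pair are emitted by the same lockutils wrapper, which is why long waits (e.g. the 15.030s wait on "compute_resources") show up directly in these lines.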
[ 1005.809771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f865e10b-24f2-47f8-9ed1-92661fbcd8e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.820286] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd48404-83b2-4c43-93f1-fd24d9de82f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.834939] env[68638]: DEBUG nova.compute.provider_tree [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1006.070456] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.228752] env[68638]: DEBUG nova.network.neutron [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.320806] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.354931] env[68638]: ERROR nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [req-8b484fbd-14c8-4d10-ad11-cc7e80200d3c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8b484fbd-14c8-4d10-ad11-cc7e80200d3c"}]} [ 1006.372547] env[68638]: DEBUG nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1006.382204] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1006.386687] env[68638]: DEBUG nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1006.386891] env[68638]: DEBUG nova.compute.provider_tree [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1006.397880] env[68638]: DEBUG nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1006.406723] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.406942] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.407116] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.407309] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.407456] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.407601] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.407826] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.407984] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.408165] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.408331] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.408506] env[68638]: DEBUG nova.virt.hardware [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.409672] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2b8b9f-dd9b-44e7-80ba-6ccbc0f3ad26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.417687] env[68638]: DEBUG nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1006.420636] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634b0d0d-82c2-4d67-9ce0-5611c1433df4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.602137] env[68638]: DEBUG nova.compute.manager [req-ed1559f4-62dc-4c8f-bfbb-36eb3118ea6f req-83e7960b-7413-4154-9a06-21506dd75878 service nova] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Received event network-vif-deleted-cf8deeee-8158-4035-a42c-831e6b8d6f83 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1006.603717] env[68638]: DEBUG nova.compute.manager [req-ed1559f4-62dc-4c8f-bfbb-36eb3118ea6f req-83e7960b-7413-4154-9a06-21506dd75878 service nova] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Received event network-vif-deleted-4d549532-6530-40bb-95cf-fba098d9c5d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1006.611461] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.611735] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8330d626-504d-43c6-a93d-4d4d73dc751c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.619695] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1006.619695] env[68638]: value = "task-2834079" [ 1006.619695] env[68638]: _type = 
"Task" [ 1006.619695] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.627807] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.732323] env[68638]: INFO nova.compute.manager [-] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Took 1.34 seconds to deallocate network for instance. [ 1006.755059] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5b4c1c-6657-41ea-af93-4a2ed4038fc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.763395] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecee8cc1-bc10-4faf-96e4-7e40e1487c5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.796237] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c75ef8-cd29-4b27-b698-dd800ccf7cdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.799613] env[68638]: DEBUG nova.compute.manager [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Received event network-vif-plugged-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1006.799825] env[68638]: DEBUG oslo_concurrency.lockutils [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] Acquiring lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.800044] env[68638]: DEBUG oslo_concurrency.lockutils [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.800257] env[68638]: DEBUG oslo_concurrency.lockutils [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.800384] env[68638]: DEBUG nova.compute.manager [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] No waiting events found dispatching network-vif-plugged-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.800569] env[68638]: WARNING nova.compute.manager [req-ce99cd50-fe23-4faf-ab1b-e959f3712c1d 
req-7ece6dbb-ec08-4831-a2f5-70346f9538b9 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Received unexpected event network-vif-plugged-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 for instance with vm_state building and task_state spawning. [ 1006.806166] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5734c3-7cf9-4a2c-bb7f-ba5a23e8cad7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.821271] env[68638]: DEBUG nova.compute.provider_tree [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.130572] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834079, 'name': PowerOffVM_Task, 'duration_secs': 0.303551} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.130572] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.131040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f91d3e-a841-427b-8147-b7cf08c14bca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.148914] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eee31c-06d9-4099-82d3-9c7f897139d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.240078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.344604] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Successfully updated port: 53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1007.357054] env[68638]: DEBUG nova.scheduler.client.report [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e 
tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1007.357318] env[68638]: DEBUG nova.compute.provider_tree [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 126 to 127 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1007.357563] env[68638]: DEBUG nova.compute.provider_tree [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.659039] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1007.659039] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-10eb6ec4-e10e-4541-a6b3-8e69003692d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.666865] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1007.666865] env[68638]: value = "task-2834080" [ 1007.666865] env[68638]: _type = "Task" [ 1007.666865] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.676056] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834080, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.846975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.847203] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.847441] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1007.862803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.511s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.863407] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.866107] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.033s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.866339] env[68638]: DEBUG nova.objects.instance [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lazy-loading 'resources' on Instance uuid ea8f58dc-1542-4723-bf86-369d4dff5f25 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.176618] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834080, 'name': CreateSnapshot_Task, 'duration_secs': 0.395997} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.176959] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1008.177693] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ddd8d4-b897-4414-8299-8c9be99b7037 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.370110] env[68638]: DEBUG nova.compute.utils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1008.374727] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1008.374897] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1008.379707] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1008.451275] env[68638]: DEBUG nova.policy [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c7d60e31234230b817fc1778234251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e7777e8e5d342d68e2f54e23d125314', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1008.689123] env[68638]: DEBUG nova.network.neutron [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [{"id": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "address": "fa:16:3e:a8:3e:ae", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ebdba3-fc", "ovs_interfaceid": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.700688] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1008.704290] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f5b75e4b-ad8b-4596-9cf5-267a11ba63dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.716300] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1008.716300] env[68638]: value = "task-2834081" [ 1008.716300] env[68638]: _type = "Task" [ 1008.716300] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.732930] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834081, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.765922] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b1e2a7-7ff0-4495-9c4c-61d2e1b5a094 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.771311] env[68638]: DEBUG nova.compute.manager [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Received event network-changed-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1008.771528] env[68638]: DEBUG nova.compute.manager [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Refreshing instance network info cache due to event network-changed-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1008.771785] env[68638]: DEBUG oslo_concurrency.lockutils [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] Acquiring lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.777918] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8eff88-8806-4926-8ad0-6e0b6778a7ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.813689] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d400fb-cab4-4449-8ed2-c1e9fed78429 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.821441] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae42e5-21c6-49e2-98d7-d2e9faa2243f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.835196] env[68638]: DEBUG nova.compute.provider_tree [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.876118] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.893987] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Successfully created port: 5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.205326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.205677] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Instance network_info: |[{"id": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "address": "fa:16:3e:a8:3e:ae", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ebdba3-fc", "ovs_interfaceid": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1009.206038] env[68638]: DEBUG oslo_concurrency.lockutils [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] Acquired lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.206233] env[68638]: DEBUG nova.network.neutron [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Refreshing network info cache for port 53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1009.207561] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:3e:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53ebdba3-fcaa-435f-a048-dd22fa9cc3b5', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.216517] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating folder: Project (3e7777e8e5d342d68e2f54e23d125314). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1009.217681] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bced61f0-1afc-471b-a941-ba5d7cc3a829 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.229475] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834081, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.232911] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created folder: Project (3e7777e8e5d342d68e2f54e23d125314) in parent group-v569734. [ 1009.233155] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating folder: Instances. Parent ref: group-v569983. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1009.233429] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3348f51b-dbd4-426d-bc94-eaee690e870b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.236728] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.236977] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.243319] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created folder: Instances in parent group-v569983. 
[ 1009.243570] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.243920] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.244011] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-219258e0-89ae-47cc-ada9-3e01b4f85257 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.265196] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.265196] env[68638]: value = "task-2834084" [ 1009.265196] env[68638]: _type = "Task" [ 1009.265196] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.273186] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834084, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.339235] env[68638]: DEBUG nova.scheduler.client.report [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.730102] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834081, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.739623] env[68638]: INFO nova.compute.manager [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Detaching volume eb98862f-bff4-43a3-b7cb-9025589cf53e [ 1009.775193] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834084, 'name': CreateVM_Task, 'duration_secs': 0.452605} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.775412] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1009.776224] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.776325] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.776650] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1009.777672] env[68638]: INFO nova.virt.block_device [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Attempting to driver detach volume eb98862f-bff4-43a3-b7cb-9025589cf53e from mountpoint /dev/sdb [ 1009.777904] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1009.778146] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569960', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'name': 'volume-eb98862f-bff4-43a3-b7cb-9025589cf53e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '90c192bd-b823-414c-b793-260eacc9904f', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'serial': 'eb98862f-bff4-43a3-b7cb-9025589cf53e'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1009.778513] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a96a87-5a3a-49d7-95dc-2555b2e48c8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.780865] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecd2133-9171-4ce3-a117-5833456cbda6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.786146] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1009.786146] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eca26e-3b0d-3aec-8309-9e19f459550a" [ 1009.786146] env[68638]: _type = "Task" [ 1009.786146] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.811039] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02201f8d-fd7d-4d9f-904a-ec8b1fc97575 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.819554] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eca26e-3b0d-3aec-8309-9e19f459550a, 'name': SearchDatastore_Task, 'duration_secs': 0.011171} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.821357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.821597] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1009.821860] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.822035] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.822227] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1009.822650] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f175c0d5-b3ac-4357-b891-b7034d5651db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.824828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bc0073-68df-4d02-81ff-c4be13390f53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.846473] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.849515] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 19.159s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.851724] 
env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cada2dd-9bce-49b4-b016-3c61ad493125 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.854212] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1009.854377] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1009.855331] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd12a79c-885e-412d-8a7f-ebc78d07596d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.862084] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1009.862084] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527cdd53-622a-492d-9869-e9fd67dadf4f" [ 1009.862084] env[68638]: _type = "Task" [ 1009.862084] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.872678] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] The volume has not been displaced from its original location: [datastore1] volume-eb98862f-bff4-43a3-b7cb-9025589cf53e/volume-eb98862f-bff4-43a3-b7cb-9025589cf53e.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1009.878014] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1009.879219] env[68638]: INFO nova.scheduler.client.report [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted allocations for instance ea8f58dc-1542-4723-bf86-369d4dff5f25 [ 1009.886844] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4564b75a-abaa-4330-8098-6698d96f8e92 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.900144] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.913349] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527cdd53-622a-492d-9869-e9fd67dadf4f, 'name': SearchDatastore_Task, 'duration_secs': 0.009781} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.915327] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1009.915327] env[68638]: value = "task-2834085" [ 1009.915327] env[68638]: _type = "Task" [ 1009.915327] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.915547] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1177187-9b86-4938-9486-8efe8b455083 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.928682] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1009.928682] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]526f0f7f-536e-56fb-50d5-d8ad596a5d11" [ 1009.928682] env[68638]: _type = "Task" [ 1009.928682] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.928890] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834085, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.937400] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.937673] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.937835] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.938064] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.938227] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.938397] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.938625] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.938818] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.939059] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.939212] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.939417] env[68638]: DEBUG nova.virt.hardware [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.940504] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204032ad-c741-4393-bc85-90bda1abe7ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.955038] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]526f0f7f-536e-56fb-50d5-d8ad596a5d11, 'name': SearchDatastore_Task, 'duration_secs': 0.011882} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.955698] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.955984] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/df2e066d-7c71-4aec-ab9b-a339a7ff21fb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.956818] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1444303b-0a40-4d97-a703-e0afdd67c390 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.959731] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc85993-3cc1-4855-9383-fa66f5432814 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.975480] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1009.975480] env[68638]: value = "task-2834086" [ 1009.975480] env[68638]: _type = "Task" [ 1009.975480] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.985086] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.052021] env[68638]: DEBUG nova.network.neutron [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updated VIF entry in instance network info cache for port 53ebdba3-fcaa-435f-a048-dd22fa9cc3b5. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.052021] env[68638]: DEBUG nova.network.neutron [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [{"id": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "address": "fa:16:3e:a8:3e:ae", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ebdba3-fc", "ovs_interfaceid": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.233178] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834081, 'name': CloneVM_Task, 'duration_secs': 1.118501} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.233178] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Created linked-clone VM from snapshot [ 1010.233754] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d964dd-6dfd-4ee9-a8e6-1b1d8efc237b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.242936] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Uploading image f510e66f-bc6c-4bb4-af29-0d55b547e445 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1010.268535] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1010.268535] env[68638]: value = "vm-569982" [ 1010.268535] env[68638]: _type = "VirtualMachine" [ 1010.268535] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1010.268816] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-695d7e5f-bb76-4e94-b472-a56751a432dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.277255] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease: (returnval){ [ 1010.277255] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea02d4-d5a5-40b7-c534-ae56ab32037e" [ 1010.277255] env[68638]: _type = "HttpNfcLease" [ 1010.277255] env[68638]: } obtained for exporting VM: (result){ [ 1010.277255] env[68638]: value = "vm-569982" [ 1010.277255] env[68638]: _type = "VirtualMachine" [ 1010.277255] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1010.277651] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the lease: (returnval){ [ 1010.277651] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea02d4-d5a5-40b7-c534-ae56ab32037e" [ 1010.277651] env[68638]: _type = "HttpNfcLease" [ 1010.277651] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1010.286009] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1010.286009] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea02d4-d5a5-40b7-c534-ae56ab32037e" [ 1010.286009] env[68638]: _type = "HttpNfcLease" [ 1010.286009] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1010.331330] env[68638]: DEBUG nova.compute.manager [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Received event network-vif-plugged-5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1010.331628] env[68638]: DEBUG oslo_concurrency.lockutils [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] Acquiring lock "7d99d946-f2df-4d31-911f-ac479849b901-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.331885] env[68638]: DEBUG oslo_concurrency.lockutils [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] Lock "7d99d946-f2df-4d31-911f-ac479849b901-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.332367] env[68638]: DEBUG oslo_concurrency.lockutils [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] Lock "7d99d946-f2df-4d31-911f-ac479849b901-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.332606] env[68638]: DEBUG nova.compute.manager [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] No waiting events found dispatching network-vif-plugged-5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.332834] env[68638]: WARNING nova.compute.manager [req-1a7d2345-caa0-4b92-a3e2-a470daee8988 req-5eced8ef-65d2-4705-a2cc-4a4d587878e1 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Received unexpected event network-vif-plugged-5b775bb7-0c12-417b-8cd1-bc0089e8658c for instance with vm_state building and task_state spawning. [ 1010.408970] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a141bfb7-f702-4979-8902-3003d35f74fe tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "ea8f58dc-1542-4723-bf86-369d4dff5f25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.119s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.430883] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834085, 'name': ReconfigVM_Task, 'duration_secs': 0.256049} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.432176] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1010.438041] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4b3b644-fe56-4cb0-b7cf-241318c0f93d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.457068] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1010.457068] env[68638]: value = "task-2834088" [ 1010.457068] env[68638]: _type = "Task" [ 1010.457068] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.466785] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.485188] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510499} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.485618] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/df2e066d-7c71-4aec-ab9b-a339a7ff21fb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.485977] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.486334] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4f46a49-6b1d-4eeb-aa77-2a82a153dc92 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.492037] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1010.492037] env[68638]: value = "task-2834089" [ 1010.492037] env[68638]: _type = "Task" [ 1010.492037] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.502715] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834089, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.552343] env[68638]: DEBUG oslo_concurrency.lockutils [req-0afb0657-0925-4e94-939d-bbda972a296a req-d0078893-be28-47ac-917a-a0d12393ecf7 service nova] Releasing lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.785129] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1010.785129] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea02d4-d5a5-40b7-c534-ae56ab32037e" [ 1010.785129] env[68638]: _type = "HttpNfcLease" [ 1010.785129] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1010.785600] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1010.785600] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea02d4-d5a5-40b7-c534-ae56ab32037e" [ 1010.785600] env[68638]: _type = "HttpNfcLease" [ 1010.785600] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1010.786406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113e9cce-2ee1-4849-90ff-9447413bf32a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.797178] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1010.797917] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1010.886807] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-259b629b-a746-45f9-bfe9-a6dfa1867379 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.897670] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.897836] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.897967] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898103] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 1b176c5d-e77c-410b-b282-b7bba65359a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898220] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 90c192bd-b823-414c-b793-260eacc9904f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898337] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4c954bb4-6291-47d5-a65c-0ad92a0fd193 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898477] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.898586] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9975e756-b571-4e70-ba50-a6001d0b064c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.898700] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898810] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0249ffb9-82ed-44db-bb20-e619eaa176dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.898929] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 94af9123-435f-4ae4-8b6d-82838df61d4e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.899229] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 32d43fce-837d-41d9-be11-a0c3cdb1694b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.899229] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e9b8e5ad-4d47-48ad-995f-b28d0230df0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.899359] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 43e0eed3-bc25-476d-a9ef-6b132514cf90 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.899395] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance f0598d8d-09a9-44ce-b4d7-cb8830a84b94 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.899615] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance fd329f9d-daf3-47ff-9c48-e1355fc012f4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.899615] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance da886efd-bca9-45aa-abcc-13832c66a90c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.899724] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.899835] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7d99d946-f2df-4d31-911f-ac479849b901 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1010.918794] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Successfully updated port: 5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.968846] env[68638]: DEBUG oslo_vmware.api [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834088, 'name': ReconfigVM_Task, 'duration_secs': 0.140334} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.969186] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569960', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'name': 'volume-eb98862f-bff4-43a3-b7cb-9025589cf53e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '90c192bd-b823-414c-b793-260eacc9904f', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb98862f-bff4-43a3-b7cb-9025589cf53e', 'serial': 'eb98862f-bff4-43a3-b7cb-9025589cf53e'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1010.982591] env[68638]: DEBUG nova.compute.manager [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Received event network-changed-5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1010.983368] env[68638]: DEBUG nova.compute.manager [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Refreshing instance network info cache due to event network-changed-5b775bb7-0c12-417b-8cd1-bc0089e8658c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1010.983368] env[68638]: DEBUG oslo_concurrency.lockutils [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] Acquiring lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.983368] env[68638]: DEBUG oslo_concurrency.lockutils [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] Acquired lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.983368] env[68638]: DEBUG nova.network.neutron [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Refreshing network info cache for port 5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1011.002859] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06318} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.003209] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1011.004121] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83bcead-15d1-4ea8-86f3-aa9196deff61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.030140] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/df2e066d-7c71-4aec-ab9b-a339a7ff21fb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.030140] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18a70603-d653-41bd-aaa9-2530fbf05ae4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.055045] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1011.055045] env[68638]: value = "task-2834090" [ 1011.055045] env[68638]: _type = "Task" [ 1011.055045] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.063298] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834090, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.229901] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "32d43fce-837d-41d9-be11-a0c3cdb1694b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.230314] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.230588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.230797] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.230978] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.233555] env[68638]: INFO nova.compute.manager [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Terminating instance [ 1011.293892] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.293996] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.294230] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.294556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.294660] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.296829] env[68638]: INFO nova.compute.manager [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Terminating instance [ 1011.402516] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e7559933-fecc-4eb6-ba71-a295fba684e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.427991] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.524248] env[68638]: DEBUG nova.objects.instance [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.541401] env[68638]: DEBUG nova.network.neutron [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.573163] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834090, 'name': ReconfigVM_Task, 'duration_secs': 0.470861} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.574747] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfigured VM instance instance-0000005a to attach disk [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/df2e066d-7c71-4aec-ab9b-a339a7ff21fb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.575784] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca454c35-3a23-41e6-b3b2-d4d0ea6baf2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.584658] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1011.584658] env[68638]: value = "task-2834091" [ 1011.584658] env[68638]: _type = "Task" [ 1011.584658] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.595400] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834091, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.667506] env[68638]: DEBUG nova.network.neutron [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.740434] env[68638]: DEBUG nova.compute.manager [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1011.740691] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.741627] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce11482-8352-48f5-8721-c2f299e26edf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.749988] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.750423] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa03d741-0d3e-43b6-b862-4ed64af49635 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.757247] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 1011.757247] env[68638]: value = "task-2834092" [ 1011.757247] env[68638]: _type = "Task" [ 1011.757247] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.765148] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834092, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.801181] env[68638]: DEBUG nova.compute.manager [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1011.801519] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.802359] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70424d3c-5d20-494f-9528-6b79dfeca7b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.810441] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.810902] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54aaa25d-2546-4b28-8213-60c932aa310b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.818401] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 1011.818401] env[68638]: value = "task-2834093" [ 1011.818401] env[68638]: _type = "Task" [ 1011.818401] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.826950] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834093, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.905956] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance dcaef2e3-eb23-4a0b-b617-2880084e03ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.097575] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834091, 'name': Rename_Task, 'duration_secs': 0.256315} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.097962] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.098324] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e537779-01b9-49fd-bbcc-814cdc139c2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.105994] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1012.105994] env[68638]: value = "task-2834094" [ 1012.105994] env[68638]: _type = "Task" [ 1012.105994] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.122879] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.171945] env[68638]: DEBUG oslo_concurrency.lockutils [req-42660b2a-8628-475d-8a0b-9824031e112c req-6b73d0d8-97cf-4c98-b6b4-7d3852880970 service nova] Releasing lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.172527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.172923] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.268608] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834092, 'name': PowerOffVM_Task, 'duration_secs': 0.299575} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.269010] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.269222] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.269540] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83f8a4b1-fe34-4d27-9054-46b193ce009a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.333698] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834093, 'name': PowerOffVM_Task, 'duration_secs': 0.24768} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.334104] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.334420] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1012.335420] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bb9d7c5-8d72-4c0a-8626-4c1dfc6ee843 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.337983] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.338204] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.338543] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleting the datastore file [datastore2] 
32d43fce-837d-41d9-be11-a0c3cdb1694b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.339357] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7d3d8d4-6eea-449f-a06e-293d77d65e8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.348145] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 1012.348145] env[68638]: value = "task-2834097" [ 1012.348145] env[68638]: _type = "Task" [ 1012.348145] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.358107] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.407107] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1012.407430] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1012.407825] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleting the datastore file [datastore2] e9b8e5ad-4d47-48ad-995f-b28d0230df0f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1012.408803] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4765bf70-1a72-4102-b5d3-ccedb7c383ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.413377] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afb7e9a6-41dc-4150-8348-a66a5a1e2c80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.421830] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for the task: (returnval){ [ 1012.421830] env[68638]: value = "task-2834098" [ 1012.421830] env[68638]: _type = "Task" [ 1012.421830] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.431970] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.493049] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.534083] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fbcf870a-4d4b-4e6b-8575-86440abb488f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.297s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.535952] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.043s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.535952] env[68638]: DEBUG nova.compute.manager [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.536959] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6579e5-6cb0-452f-8985-dd93a05ab2c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.544468] env[68638]: DEBUG nova.compute.manager [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1012.545202] env[68638]: DEBUG nova.objects.instance [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.615710] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834094, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.712435] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1012.853360] env[68638]: DEBUG nova.network.neutron [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Updating instance_info_cache with network_info: [{"id": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "address": "fa:16:3e:83:bb:36", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b775bb7-0c", "ovs_interfaceid": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.859716] env[68638]: DEBUG oslo_vmware.api [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164383} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.859958] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.860153] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.860332] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.860508] env[68638]: INFO nova.compute.manager [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1012.860740] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.860925] env[68638]: DEBUG nova.compute.manager [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1012.861028] env[68638]: DEBUG nova.network.neutron [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1012.918681] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2cdcff10-089b-47fd-ba41-2e3a75cd33b0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.931173] env[68638]: DEBUG oslo_vmware.api [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Task: {'id': task-2834098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157863} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.931553] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.931616] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.931745] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.931912] env[68638]: INFO nova.compute.manager [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1012.932318] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.932868] env[68638]: DEBUG nova.compute.manager [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1012.932971] env[68638]: DEBUG nova.network.neutron [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1013.117033] env[68638]: DEBUG oslo_vmware.api [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834094, 'name': PowerOnVM_Task, 'duration_secs': 0.588877} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.117033] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.117033] env[68638]: INFO nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Took 6.73 seconds to spawn the instance on the hypervisor. 
[ 1013.117033] env[68638]: DEBUG nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.117566] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6661d5-7739-4cf0-86ff-6f8fec68479d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.356181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.356624] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance network_info: |[{"id": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "address": "fa:16:3e:83:bb:36", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b775bb7-0c", "ovs_interfaceid": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1013.357108] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:bb:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b775bb7-0c12-417b-8cd1-bc0089e8658c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1013.369296] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1013.369546] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1013.369785] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea7916b4-3fed-4aa8-aedd-ef4b2fc4c01d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.392536] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1013.392536] env[68638]: value = "task-2834099" [ 1013.392536] env[68638]: _type = "Task" [ 1013.392536] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.402612] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834099, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.422282] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 71ec29a8-5e2f-4ccd-9c22-d9721c77622e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1013.548142] env[68638]: DEBUG nova.compute.manager [req-16890146-efae-4db1-9de6-c2096010fa95 req-4454f73a-3d3b-4de0-aa48-7319a2086d5b service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Received event network-vif-deleted-3efe2356-fa43-4dca-b3ba-854d3d82e4cf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1013.548375] env[68638]: INFO nova.compute.manager [req-16890146-efae-4db1-9de6-c2096010fa95 req-4454f73a-3d3b-4de0-aa48-7319a2086d5b service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Neutron deleted interface 3efe2356-fa43-4dca-b3ba-854d3d82e4cf; detaching it from the instance and deleting it from the info cache [ 1013.548561] env[68638]: DEBUG nova.network.neutron [req-16890146-efae-4db1-9de6-c2096010fa95 req-4454f73a-3d3b-4de0-aa48-7319a2086d5b service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.554785] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1013.555090] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ede90629-f471-44f5-9ef1-1c3fb7dc7b22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.562800] env[68638]: DEBUG oslo_vmware.api [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1013.562800] 
env[68638]: value = "task-2834100" [ 1013.562800] env[68638]: _type = "Task" [ 1013.562800] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.571693] env[68638]: DEBUG oslo_vmware.api [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.635390] env[68638]: INFO nova.compute.manager [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Took 24.87 seconds to build instance. [ 1013.902992] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834099, 'name': CreateVM_Task, 'duration_secs': 0.355807} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.903427] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1013.904022] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.904234] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.904630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1013.904957] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54560f28-1023-4846-b91b-658157a02e61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.910389] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1013.910389] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5221cf81-679d-ae12-0e52-255f0c86e411" [ 1013.910389] env[68638]: _type = "Task" [ 1013.910389] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.918667] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221cf81-679d-ae12-0e52-255f0c86e411, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.925604] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 9ddb29ae-9724-4712-af58-4b8d6546c6af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1013.925870] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1013.926029] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1013.954436] env[68638]: DEBUG nova.network.neutron [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.023378] env[68638]: DEBUG nova.network.neutron [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.054269] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6911c2c2-0684-442f-b292-a441cfba32d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.064888] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb67e5f-20a1-4053-a69e-a0be1e0a140d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.088418] env[68638]: DEBUG oslo_vmware.api [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834100, 'name': PowerOffVM_Task, 'duration_secs': 0.195894} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.088835] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.089180] env[68638]: DEBUG nova.compute.manager [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1014.092551] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74618934-b24a-4505-93ec-f8045d32f590 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.108684] env[68638]: DEBUG nova.compute.manager [req-16890146-efae-4db1-9de6-c2096010fa95 req-4454f73a-3d3b-4de0-aa48-7319a2086d5b service nova] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Detach interface failed, port_id=3efe2356-fa43-4dca-b3ba-854d3d82e4cf, reason: Instance 32d43fce-837d-41d9-be11-a0c3cdb1694b could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1014.137465] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ad840ea6-99cc-44b9-b137-4541a558920d tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.381s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.292842] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cf4882-8b55-4b6b-852e-85728b9f4d3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.301882] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a102ba-0acf-4f16-810e-21149b44dbbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.332797] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb31d0e0-69d7-4ca1-a958-5b88783f5696 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.340375] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b59c4d-ee5b-41ef-a37e-f13aae47ebae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.353558] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.420782] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 
tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221cf81-679d-ae12-0e52-255f0c86e411, 'name': SearchDatastore_Task, 'duration_secs': 0.019741} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.421123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.421371] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.421614] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.421758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.421938] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.422224] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d61c0c4e-7e25-43de-b9a1-a4fee8d158c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.430745] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.431321] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.431725] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82f912d1-b8bd-4071-a170-b5c5ced6e4f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.437876] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1014.437876] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52391a71-7914-b5d0-dd86-767355f674ec" [ 1014.437876] env[68638]: _type = "Task" [ 1014.437876] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.449531] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52391a71-7914-b5d0-dd86-767355f674ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009267} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.450533] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-544f8437-1854-400d-82b8-a5def2ddefc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.456298] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1014.456298] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523b3ca7-d8f4-dc6f-82fd-48d3ed21c0e0" [ 1014.456298] env[68638]: _type = "Task" [ 1014.456298] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.457209] env[68638]: INFO nova.compute.manager [-] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Took 1.60 seconds to deallocate network for instance. [ 1014.469034] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523b3ca7-d8f4-dc6f-82fd-48d3ed21c0e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009586} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.472377] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.472632] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/7d99d946-f2df-4d31-911f-ac479849b901.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1014.473207] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0120c4bc-8b2d-4035-9e55-7e577a8b4c6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.480761] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1014.480761] env[68638]: value = "task-2834101" [ 1014.480761] env[68638]: _type = "Task" [ 1014.480761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.488959] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.527726] env[68638]: INFO nova.compute.manager [-] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Took 1.59 seconds to deallocate network for instance. 
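The recurring "Invoking <object>.<method>", "Waiting for the task: (returnval){...}" and "Task: {...} progress is N%" entries above are the standard oslo.vmware request/poll cycle. The following is only a minimal sketch of how a caller typically drives that cycle, not Nova's actual code: the vCenter endpoint, credentials and managed-object ID are placeholders, and constructor arguments can vary between oslo.vmware releases.

# Minimal, illustrative sketch of the oslo.vmware invoke/poll pattern.
# Endpoint, credentials and the moref value below are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password',   # placeholder endpoint/creds
    api_retry_count=10,
    task_poll_interval=0.5)   # interval between the "progress is N%" poll lines

# Property reads go through the PropertyCollector; this is what the repeated
# "Invoking PropertyCollector.RetrievePropertiesEx" DEBUG lines correspond to.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref
power_state = session.invoke_api(vim_util, 'get_object_property',
                                 session.vim, vm_ref, 'runtime.powerState')

# Long-running operations return a Task moref; wait_for_task() polls it until
# completion, producing the wait_for_task/_poll_task entries seen in this log.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)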
[ 1014.619907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-81773246-cd83-4dd0-9b1f-20841ae293b8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.084s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.856873] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1014.974843] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.990308] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834101, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.034356] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.145711] env[68638]: DEBUG nova.objects.instance [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.363933] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1015.364233] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.515s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.364531] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.495s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.366180] env[68638]: INFO nova.compute.claims [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1015.492814] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834101, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517249} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.493169] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/7d99d946-f2df-4d31-911f-ac479849b901.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1015.493450] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1015.493742] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc74f57c-3b26-40c3-bca9-3cac54dc0e1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.501039] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1015.501039] env[68638]: value = "task-2834102" [ 1015.501039] env[68638]: _type = "Task" [ 1015.501039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.512220] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834102, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.605946] env[68638]: DEBUG nova.compute.manager [req-f60e9c1f-0ae6-4206-b845-c727c07213c3 req-ef47f7a2-507a-45f5-a5e3-759554b682d5 service nova] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Received event network-vif-deleted-02b188c7-b3e5-45fe-b1c8-8af03b12180b {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1015.650731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.650918] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.651072] env[68638]: DEBUG nova.network.neutron [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.651249] env[68638]: DEBUG nova.objects.instance [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'info_cache' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.011405] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123985} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.011703] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1016.012410] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139d67fa-60e5-45b0-b609-614c86cf86a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.035609] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/7d99d946-f2df-4d31-911f-ac479849b901.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.035885] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f24f9602-7f27-48b5-911e-1f3a7543c502 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.055814] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1016.055814] env[68638]: value = "task-2834103" [ 1016.055814] env[68638]: _type = "Task" [ 1016.055814] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.064855] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834103, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.156992] env[68638]: DEBUG nova.objects.base [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Object Instance<90c192bd-b823-414c-b793-260eacc9904f> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1016.565227] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834103, 'name': ReconfigVM_Task, 'duration_secs': 0.285397} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.567649] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/7d99d946-f2df-4d31-911f-ac479849b901.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.568464] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3959b42a-2ba6-4387-969f-31d0feace672 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.575199] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1016.575199] env[68638]: value = "task-2834104" [ 1016.575199] env[68638]: _type = "Task" [ 1016.575199] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.585119] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834104, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.684889] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb7f41a-8a99-4632-a868-3fdfd9e311bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.695771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426d6ea6-8284-44b6-b309-cd2683847435 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.726505] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf48a9b-d6d1-4fff-bdbb-cc44f0ac58bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.734490] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6409f4-8205-4801-a87c-f085d49ee11a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.749780] env[68638]: DEBUG nova.compute.provider_tree [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.934075] env[68638]: DEBUG nova.network.neutron [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [{"id": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", 
"address": "fa:16:3e:9a:9b:14", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2378ad7-a6", "ovs_interfaceid": "d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.085946] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834104, 'name': Rename_Task, 'duration_secs': 0.147889} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.086294] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1017.086533] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67950233-ac0c-4721-af1b-8f9e5987ccb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.093433] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1017.093433] env[68638]: value = "task-2834105" [ 1017.093433] env[68638]: _type = "Task" [ 1017.093433] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.100928] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834105, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.255024] env[68638]: DEBUG nova.scheduler.client.report [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.437778] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "refresh_cache-90c192bd-b823-414c-b793-260eacc9904f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.604274] env[68638]: DEBUG oslo_vmware.api [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834105, 'name': PowerOnVM_Task, 'duration_secs': 0.44595} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.604566] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.604773] env[68638]: INFO nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Took 7.70 seconds to spawn the instance on the hypervisor. 
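The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries and the "Lock ... acquired by ... :: waited Ns" / "released ... :: held Ns" accounting above come from oslo.concurrency's lock helpers. Below is a minimal sketch of the two usage patterns under placeholder lock names; it is not Nova's call site, only an illustration of what emits these DEBUG lines.

# Minimal, illustrative sketch of the oslo.concurrency lock patterns.
# Lock names and the function are placeholders, not Nova's real code.
from oslo_concurrency import lockutils

# Decorator form: the wrapped call runs under a named in-process lock, and
# lockutils logs how long the caller waited for it and how long it was held.
@lockutils.synchronized('compute_resources', 'nova-')
def update_usage():
    pass

# Context-manager form: emits the "Acquiring lock" / "Acquired lock" /
# "Releasing lock" DEBUG lines around the critical section.
with lockutils.lock('refresh_cache-<instance-uuid>', 'nova-'):
    pass

update_usage()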
[ 1017.604950] env[68638]: DEBUG nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.605791] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577d6fd5-c5d6-4c50-ae27-5d9dde0da38c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.760673] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.761406] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.763868] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.744s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.764581] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.766182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.110s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.767598] env[68638]: INFO nova.compute.claims [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.786914] env[68638]: INFO nova.scheduler.client.report [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance 94af9123-435f-4ae4-8b6d-82838df61d4e [ 1018.125072] env[68638]: INFO nova.compute.manager [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 
7d99d946-f2df-4d31-911f-ac479849b901] Took 28.82 seconds to build instance. [ 1018.271899] env[68638]: DEBUG nova.compute.utils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1018.274994] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1018.275187] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1018.295474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eb4cabe6-b6d2-41e4-adeb-80022871482a tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "94af9123-435f-4ae4-8b6d-82838df61d4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.576s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.343142] env[68638]: DEBUG nova.policy [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1018.444927] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.444927] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-152aa1ce-f915-4644-81c0-957d02c5f568 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.455535] env[68638]: DEBUG oslo_vmware.api [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1018.455535] env[68638]: value = "task-2834106" [ 1018.455535] env[68638]: _type = "Task" [ 1018.455535] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.465635] env[68638]: DEBUG oslo_vmware.api [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834106, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.628410] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1900948d-c8ca-4657-bc71-69c0eaff3a7e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.332s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.645891] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Successfully created port: 637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1018.695779] env[68638]: INFO nova.compute.manager [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Rescuing [ 1018.695779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.695779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.695779] env[68638]: DEBUG nova.network.neutron [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.781580] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1018.969746] env[68638]: DEBUG oslo_vmware.api [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834106, 'name': PowerOnVM_Task, 'duration_secs': 0.473336} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.970095] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.970316] env[68638]: DEBUG nova.compute.manager [None req-a79ec540-bb55-4c57-89bc-57cc7bb4c7d8 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.975017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78030eb7-99b8-4e95-8166-f01100c56996 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.152215] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b595b108-8f7e-4db6-90b9-e331a3ca693d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.160783] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f5b002-b378-4e9c-b556-7f750837e15e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.194036] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21d3089-0ada-42cd-b634-656bfdb4e0e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.204136] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031c59df-9ec9-4d7a-a1ec-f5454d6bf367 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.218438] env[68638]: DEBUG nova.compute.provider_tree [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.431757] env[68638]: DEBUG nova.network.neutron [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Updating instance_info_cache with network_info: [{"id": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "address": "fa:16:3e:83:bb:36", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b775bb7-0c", "ovs_interfaceid": "5b775bb7-0c12-417b-8cd1-bc0089e8658c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.721135] env[68638]: DEBUG nova.scheduler.client.report [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.793623] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1019.813907] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1019.814165] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1019.814441] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1019.814646] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1019.814794] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1019.814944] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1019.815169] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1019.815393] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1019.815569] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1019.815733] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1019.815904] env[68638]: DEBUG nova.virt.hardware [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1019.816969] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257ad4d1-5c86-4853-ad36-a2bfa274a7ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.825769] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d38cfa7-5fac-4546-8fea-a5386ecdccf4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.934577] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-7d99d946-f2df-4d31-911f-ac479849b901" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.149147] env[68638]: DEBUG nova.compute.manager [req-d3839c01-d06e-407d-837e-ed269abd2e08 
req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Received event network-vif-plugged-637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1020.149532] env[68638]: DEBUG oslo_concurrency.lockutils [req-d3839c01-d06e-407d-837e-ed269abd2e08 req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] Acquiring lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.150130] env[68638]: DEBUG oslo_concurrency.lockutils [req-d3839c01-d06e-407d-837e-ed269abd2e08 req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.150323] env[68638]: DEBUG oslo_concurrency.lockutils [req-d3839c01-d06e-407d-837e-ed269abd2e08 req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.150500] env[68638]: DEBUG nova.compute.manager [req-d3839c01-d06e-407d-837e-ed269abd2e08 req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] No waiting events found dispatching network-vif-plugged-637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1020.150691] env[68638]: WARNING nova.compute.manager [req-d3839c01-d06e-407d-837e-ed269abd2e08 req-2fd2af9e-b455-48f0-89a1-fb255a9c501e service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Received unexpected event network-vif-plugged-637e93a8-5c95-4b4b-8681-1ef5669b70f9 for instance with vm_state building and task_state spawning. [ 1020.227264] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.228084] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1020.230832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.409s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.231056] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.233337] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.251s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.233443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.235737] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.432s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.237345] env[68638]: INFO nova.compute.claims [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.249967] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Successfully updated port: 637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.264415] env[68638]: INFO nova.scheduler.client.report [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Deleted allocations for instance fd329f9d-daf3-47ff-9c48-e1355fc012f4 [ 1020.271433] env[68638]: INFO nova.scheduler.client.report [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Deleted allocations for instance 4edaaa5d-535a-4c63-ab44-724548a0f3eb [ 1020.495140] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.495527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.741961] env[68638]: DEBUG nova.compute.utils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1020.749021] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1020.749021] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1020.754721] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.754721] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.754721] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.773255] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ee6522d-57d7-4bb4-a59f-a4d24b538e6c tempest-ServersAaction247Test-935830101 tempest-ServersAaction247Test-935830101-project-member] Lock "fd329f9d-daf3-47ff-9c48-e1355fc012f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.056s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.780135] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-c71fc5f9-452f-44cc-92bd-12b4c562e5ca tempest-ServersAdminTestJSON-1111482880 tempest-ServersAdminTestJSON-1111482880-project-member] Lock "4edaaa5d-535a-4c63-ab44-724548a0f3eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.602s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.803858] env[68638]: DEBUG nova.policy [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba0da21142574d2f8f88fa0ff9fd8b59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de061db065b148bdae3776236a685cef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1020.997707] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1021.158389] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Successfully created port: afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.251683] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1021.333357] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Instance cache missing network info. 
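The lockutils entries above record, for each "compute_resources" and per-instance lock, how long the caller waited to acquire it and how long it was held ("waited 19.409s", "held 0.000s", "held 24.056s"). A minimal stdlib-only sketch of that accounting follows; it mimics the pattern rather than using oslo.concurrency itself, and the lock name and callback are illustrative.

    import threading
    import time

    _LOCKS = {}                       # name -> threading.Lock (illustrative registry)
    _REGISTRY_LOCK = threading.Lock()


    def _get_lock(name):
        with _REGISTRY_LOCK:
            return _LOCKS.setdefault(name, threading.Lock())


    def timed_lock(name, func, *args, **kwargs):
        """Run func() under the named lock, reporting wait and hold times."""
        lock = _get_lock(name)
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
            t1 = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                held = time.monotonic() - t1
                print('Lock "%s" released :: held %.3fs' % (name, held))


    # Example: serialize a resource-tracker style update under the shared lock.
    timed_lock("compute_resources", lambda: time.sleep(0.01))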
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.467047] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.467493] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b7b8ca9-6481-485b-a7e1-0493e2d2deb2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.477050] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1021.477050] env[68638]: value = "task-2834107" [ 1021.477050] env[68638]: _type = "Task" [ 1021.477050] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.495608] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834107, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.524131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.628853] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk. 
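The PowerOffVM_Task / "Waiting for the task" / "progress is 0%" sequence above is the usual oslo.vmware pattern: invoke a vSphere *_Task method, then block on wait_for_task while it polls the task to completion. A hedged sketch, assuming a configured VMwareAPISession and an already-resolved vm_ref; the host and credentials are placeholders.

    from oslo_vmware import api as vmware_api


    def power_off(session, vm_ref):
        """Invoke PowerOffVM_Task on vm_ref and block until it finishes."""
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # wait_for_task polls the task (logging progress) and raises on error.
        session.wait_for_task(task)


    if __name__ == "__main__":
        # Placeholder connection values; vm_ref would come from a prior
        # property-collector lookup (e.g. Nova's vm_util.get_vm_ref).
        session = vmware_api.VMwareAPISession(
            "vcenter.example.org", "user", "password",
            api_retry_count=10, task_poll_interval=0.5)
        # power_off(session, vm_ref)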
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1021.633261] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc8efd5-15dd-41f8-923e-4e714f239479 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.640174] env[68638]: DEBUG nova.network.neutron [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Updating instance_info_cache with network_info: [{"id": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "address": "fa:16:3e:bf:5c:34", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637e93a8-5c", "ovs_interfaceid": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.646109] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1021.646109] env[68638]: ERROR oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk due to incomplete transfer. [ 1021.646109] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-973cf4c9-a884-4e9b-a0b6-f20ae2ca5417 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.653581] env[68638]: DEBUG oslo_vmware.rw_handles [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5256a2a9-a957-3d2b-ded0-696cb8d2943f/disk-0.vmdk. 
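The ERROR above ("Aborting lease ... due to incomplete transfer") reflects the NFC lease cleanup rule used by the read/write handles: if the lease is still in the ready state but the full disk was not transferred, the lease is aborted rather than completed. A simplified sketch of that decision, assuming an oslo.vmware session and a lease moref; the byte counters are illustrative inputs.

    from oslo_vmware import vim_util


    def close_lease(session, lease, bytes_transferred, bytes_expected):
        """Complete the NFC lease on a full transfer, abort it otherwise."""
        state = session.invoke_api(vim_util, "get_object_property",
                                   session.vim, lease, "state")
        if state != "ready":
            return  # nothing to release; lease already completed or errored
        if bytes_transferred == bytes_expected:
            session.invoke_api(session.vim, "HttpNfcLeaseComplete", lease)
        else:
            # Incomplete transfer: abort so vCenter discards the partial disk.
            session.invoke_api(session.vim, "HttpNfcLeaseAbort", lease)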
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1021.653890] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Uploaded image f510e66f-bc6c-4bb4-af29-0d55b547e445 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1021.656499] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1021.659388] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cc9bcbef-996f-4deb-8486-95a28ebaa582 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.667592] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1021.667592] env[68638]: value = "task-2834108" [ 1021.667592] env[68638]: _type = "Task" [ 1021.667592] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.678206] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834108, 'name': Destroy_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.681176] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0b7485-3523-40f7-b248-1f21948086be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.689289] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275d5b62-6ea3-40d2-8fdd-b20030b04c98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.723541] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566baca3-f544-46f5-89a3-af2cca0dff20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.731240] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050282e0-1cec-43b8-9cfe-3775ef461e4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.747331] env[68638]: DEBUG nova.compute.provider_tree [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.989166] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834107, 'name': PowerOffVM_Task, 'duration_secs': 0.211736} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.989166] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.989365] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223d57b0-208f-4ca6-ba0d-ae00cfec09ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.010868] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283e5262-0350-4ac6-9175-895dab7a14f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.040139] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.040874] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-450eca38-0918-4d3f-b19c-8acabaf7d504 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.047443] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1022.047443] env[68638]: value = "task-2834109" [ 1022.047443] env[68638]: _type = "Task" [ 1022.047443] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.054928] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834109, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.144226] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.144612] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Instance network_info: |[{"id": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "address": "fa:16:3e:bf:5c:34", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637e93a8-5c", "ovs_interfaceid": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1022.145060] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:5c:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '637e93a8-5c95-4b4b-8681-1ef5669b70f9', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.152953] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
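The "Instance VIF info" line above shows how a neutron network_info entry (an OVS port bound by the nsxv3 driver) is reduced to the small dict the VMware driver needs: bridge name, MAC, an OpaqueNetwork reference keyed by the NSX logical-switch id, the port id, and the vmxnet3 model. A standalone sketch of that mapping; it is not Nova's code, but the field names follow the log output.

    def vif_to_vmware_info(vif):
        """Map one neutron network_info entry to a VMware-style VIF dict."""
        details = vif.get("details", {})
        return {
            "network_name": vif["network"]["bridge"],          # e.g. br-int
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details.get("nsx-logical-switch-id"),
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }


    example_vif = {
        "id": "637e93a8-5c95-4b4b-8681-1ef5669b70f9",
        "address": "fa:16:3e:bf:5c:34",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c"},
    }
    print(vif_to_vmware_info(example_vif))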
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.153232] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1022.153459] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-280b959b-6d74-435c-ac25-8b897baefdbb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.175030] env[68638]: DEBUG nova.compute.manager [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Received event network-changed-637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1022.175239] env[68638]: DEBUG nova.compute.manager [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Refreshing instance network info cache due to event network-changed-637e93a8-5c95-4b4b-8681-1ef5669b70f9. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1022.175587] env[68638]: DEBUG oslo_concurrency.lockutils [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] Acquiring lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.175652] env[68638]: DEBUG oslo_concurrency.lockutils [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] Acquired lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.175787] env[68638]: DEBUG nova.network.neutron [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Refreshing network info cache for port 637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.181355] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.181355] env[68638]: value = "task-2834110" [ 1022.181355] env[68638]: _type = "Task" [ 1022.181355] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.181594] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834108, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.192349] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834110, 'name': CreateVM_Task} progress is 5%. 
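The "Received event network-changed-…" entries above show the external-event path: a neutron notification arrives for a port, and the handler refreshes that instance's network info cache while holding the per-instance "refresh_cache-<uuid>" lock so it cannot race with the build path seen earlier. A minimal sketch of that locking discipline, using stdlib locks and a stub network_api object rather than Nova's implementation.

    import threading

    _registry_lock = threading.Lock()
    _cache_locks = {}        # instance uuid -> lock guarding its nw-info cache
    _nw_info_cache = {}      # instance uuid -> cached network_info


    def _cache_lock(instance_uuid):
        with _registry_lock:
            return _cache_locks.setdefault(instance_uuid, threading.Lock())


    def handle_network_changed(instance_uuid, port_id, network_api):
        """Refresh one instance's cached network_info after a neutron port event."""
        # port_id identifies which VIF changed; this sketch simply rebuilds the
        # whole cache entry under the per-instance lock.
        with _cache_lock(instance_uuid):
            nw_info = network_api.get_instance_nw_info(instance_uuid)
            _nw_info_cache[instance_uuid] = nw_info
            return nw_info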
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.248410] env[68638]: DEBUG nova.scheduler.client.report [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.263756] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1022.288342] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1022.288610] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.288769] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1022.288953] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.289350] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Image 
pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1022.289537] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1022.289759] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1022.290571] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1022.290571] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1022.290571] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1022.290571] env[68638]: DEBUG nova.virt.hardware [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1022.291408] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81616089-ea63-4636-94db-956bf79fabef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.299429] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca740735-eb9b-4fa1-8e1c-48236c5d90af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.559025] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1022.559284] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Processing 
image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1022.559570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.559746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.559942] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1022.560238] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d2581c0-b682-482a-a4c6-41c739a74534 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.571316] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1022.571623] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1022.572493] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6fe1fba-19e1-4f97-93df-39899a364b07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.580857] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1022.580857] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52447996-8122-c394-e600-f29fa0fe0dac" [ 1022.580857] env[68638]: _type = "Task" [ 1022.580857] env[68638]: } to complete. 
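The nova.virt.hardware lines a short way above walk a fixed recipe: take the flavor/image limits (all unset here, so the 65536 defaults apply), enumerate every (sockets, cores, threads) combination whose product equals the vCPU count, and sort by preference, which for the 1-vCPU m1.nano flavor leaves only 1:1:1. A self-contained sketch of that enumeration; the defaults mirror the log and the preference sorting is omitted.

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """All (sockets, cores, threads) whose product is exactly vcpus."""
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found


    # 1 vCPU -> exactly one candidate, matching "Got 1 possible topologies".
    print(possible_topologies(1))    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
    print(possible_topologies(4)[:3])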
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.589814] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52447996-8122-c394-e600-f29fa0fe0dac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.679321] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834108, 'name': Destroy_Task, 'duration_secs': 0.956682} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.679605] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Destroyed the VM [ 1022.679879] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1022.680189] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d14f3c29-f7a3-46d4-a0a5-73a6a5d599d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.690157] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1022.690157] env[68638]: value = "task-2834111" [ 1022.690157] env[68638]: _type = "Task" [ 1022.690157] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.700017] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834110, 'name': CreateVM_Task, 'duration_secs': 0.470853} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.700017] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1022.700017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.700017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.700017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1022.702041] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3db5e4e0-af77-40de-a102-3ce431c508ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.703869] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834111, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.706678] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1022.706678] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5223a291-cc2f-38f6-66e1-af06836779ab" [ 1022.706678] env[68638]: _type = "Task" [ 1022.706678] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.714088] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5223a291-cc2f-38f6-66e1-af06836779ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.753455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.518s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.754075] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.756794] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.520s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.758192] env[68638]: INFO nova.compute.claims [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1023.082554] env[68638]: DEBUG nova.compute.manager [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Received event network-vif-plugged-afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1023.082836] env[68638]: DEBUG oslo_concurrency.lockutils [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] Acquiring lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.083143] env[68638]: DEBUG oslo_concurrency.lockutils [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.083358] env[68638]: DEBUG oslo_concurrency.lockutils [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.083513] env[68638]: DEBUG nova.compute.manager [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] No waiting events found dispatching network-vif-plugged-afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1023.083681] env[68638]: WARNING nova.compute.manager [req-48c98a9a-0a8d-4014-908c-90dbc21d3d20 req-4dddc2ef-0932-4bcf-b22d-af5371a75cc7 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Received unexpected event network-vif-plugged-afa7c854-c29e-429c-8b58-1c18417595ca for instance with vm_state building and task_state spawning. [ 1023.093502] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52447996-8122-c394-e600-f29fa0fe0dac, 'name': SearchDatastore_Task, 'duration_secs': 0.015452} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.094283] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51bfaab2-7365-45f6-81fd-db31fc289efb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.101530] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1023.101530] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52abfa91-83cc-ff4d-23ce-d3f8bfa1d2f5" [ 1023.101530] env[68638]: _type = "Task" [ 1023.101530] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.110331] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52abfa91-83cc-ff4d-23ce-d3f8bfa1d2f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.202439] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834111, 'name': RemoveSnapshot_Task, 'duration_secs': 0.370965} completed successfully. 
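The sequence above ("Received event network-vif-plugged-…", "No waiting events found dispatching …", then the WARNING about an unexpected event) reflects the compute manager's event registry: the build path may register interest in an (instance, event) pair and wait on it, and when neutron's notification arrives while nobody is waiting, the event is reported as unexpected. A small stdlib sketch of such a registry; the names are illustrative, not Nova's classes.

    import threading

    _events_lock = threading.Lock()
    _waiting = {}   # (instance_uuid, event_name) -> threading.Event


    def prepare_for_event(instance_uuid, event_name):
        """Called by the build path before it starts waiting for a VIF plug."""
        ev = threading.Event()
        with _events_lock:
            _waiting[(instance_uuid, event_name)] = ev
        return ev


    def pop_event(instance_uuid, event_name):
        """Called when a neutron notification arrives for the instance."""
        with _events_lock:
            ev = _waiting.pop((instance_uuid, event_name), None)
        if ev is None:
            print("WARNING: received unexpected event %s for %s"
                  % (event_name, instance_uuid))
        else:
            ev.set()     # wake up whoever was waiting on this event
        return ev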
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.202826] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1023.203146] env[68638]: DEBUG nova.compute.manager [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.203944] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104a1f59-234d-4935-ae24-51ab9b3c93e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.226010] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5223a291-cc2f-38f6-66e1-af06836779ab, 'name': SearchDatastore_Task, 'duration_secs': 0.01676} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.226993] env[68638]: DEBUG nova.network.neutron [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Updated VIF entry in instance network info cache for port 637e93a8-5c95-4b4b-8681-1ef5669b70f9. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1023.227334] env[68638]: DEBUG nova.network.neutron [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Updating instance_info_cache with network_info: [{"id": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "address": "fa:16:3e:bf:5c:34", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap637e93a8-5c", "ovs_interfaceid": "637e93a8-5c95-4b4b-8681-1ef5669b70f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.228635] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.228863] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.229107] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.229255] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.229434] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.229870] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-693e6ece-0ffe-42b6-bef5-10988e2929ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.252996] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.253215] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.254369] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172f4990-11c6-4962-b769-3e166344022b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.260713] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1023.260713] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52435d3c-375d-cd68-5553-74c7f2b555c4" [ 1023.260713] env[68638]: _type = "Task" [ 1023.260713] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.261912] env[68638]: DEBUG nova.compute.utils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.266036] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1023.266295] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.277700] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52435d3c-375d-cd68-5553-74c7f2b555c4, 'name': SearchDatastore_Task} progress is 0%. 
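The ds_util/vmops lines above repeat the image-cache preparation seen on datastore1 earlier: take a lock on the cached VMDK path, make sure the devstack-image-cache_base folder exists (MakeDirectory), then run a SearchDatastore_Task to decide whether the image still needs to be copied. A hedged sketch of the directory step, assuming an oslo.vmware session, a datacenter moref, and a datastore path string; the presence check itself is only noted in a comment.

    from oslo_vmware import exceptions as vexc


    def ensure_cache_dir(session, dc_ref, ds_path):
        """Create the image-cache folder on the datastore if it is missing."""
        file_manager = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, "MakeDirectory", file_manager,
                               name=ds_path,   # e.g. "[datastore2] devstack-image-cache_base"
                               datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass   # another worker created it first; the cache dir exists
        # A SearchDatastore_Task against this folder would then tell us whether
        # <image-id>/<image-id>.vmdk is already cached, so the copy can be skipped.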
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.310174] env[68638]: DEBUG nova.policy [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7569a0fd95c644d38ef18de41870bde4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fdd5447a0546b7b0fe2ed9ea0efc73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1023.584310] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Successfully created port: f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.613526] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52abfa91-83cc-ff4d-23ce-d3f8bfa1d2f5, 'name': SearchDatastore_Task, 'duration_secs': 0.056813} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.613946] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.614086] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. {{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1023.614696] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47bda82e-921a-4c75-9732-0fdd41000145 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.622037] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1023.622037] env[68638]: value = "task-2834112" [ 1023.622037] env[68638]: _type = "Task" [ 1023.622037] env[68638]: } to complete. 
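The nova.policy lines above and earlier show the same pattern each time: before wiring a port, the request context's credentials (here roles member/reader, is_admin False) are checked against the network:attach_external_network rule, and the check fails for non-admin users. A small oslo.policy sketch of that kind of check; the default rule string used here is illustrative, not Nova's exact default.

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "is_admin:True"))

    creds = {"roles": ["member", "reader"], "is_admin": False,
             "project_id": "de061db065b148bdae3776236a685cef"}

    # do_raise=False returns a boolean instead of raising PolicyNotAuthorized.
    allowed = enforcer.authorize("network:attach_external_network",
                                 {}, creds, do_raise=False)
    print(allowed)   # False for a plain member/reader token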
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.630638] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.730133] env[68638]: INFO nova.compute.manager [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Shelve offloading [ 1023.731723] env[68638]: DEBUG oslo_concurrency.lockutils [req-0869a978-573c-4df7-b398-bec599ac8360 req-23ad3a9f-b467-4c19-8454-292f37a80b96 service nova] Releasing lock "refresh_cache-e7559933-fecc-4eb6-ba71-a295fba684e4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.771140] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.776977] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52435d3c-375d-cd68-5553-74c7f2b555c4, 'name': SearchDatastore_Task, 'duration_secs': 0.038082} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.779275] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad73dcec-2e42-4bfd-abdb-0e7ec6f8dafa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.784964] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1023.784964] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523adeaf-5fd1-d213-e6a0-d33da6927b2b" [ 1023.784964] env[68638]: _type = "Task" [ 1023.784964] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.793993] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523adeaf-5fd1-d213-e6a0-d33da6927b2b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.900030] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Successfully updated port: afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.132328] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834112, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.160190] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcfec49-541d-48d0-9540-8196a2dc286f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.169121] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744be288-b5a2-47a7-8599-ad9eed687c21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.202787] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada0f2b9-5436-4307-82aa-ab8aca46ea3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.209804] env[68638]: DEBUG nova.compute.manager [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Received event network-changed-afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1024.210012] env[68638]: DEBUG nova.compute.manager [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Refreshing instance network info cache due to event network-changed-afa7c854-c29e-429c-8b58-1c18417595ca. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1024.210266] env[68638]: DEBUG oslo_concurrency.lockutils [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] Acquiring lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.210438] env[68638]: DEBUG oslo_concurrency.lockutils [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] Acquired lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.210613] env[68638]: DEBUG nova.network.neutron [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Refreshing network info cache for port afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.216648] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef78398-9ae5-4078-a030-6189a6923476 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.231919] env[68638]: DEBUG nova.compute.provider_tree [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.233996] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.234285] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b1b6b23-b56c-4a7c-9c81-9d38bd4531c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.242048] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1024.242048] env[68638]: value = "task-2834113" [ 1024.242048] env[68638]: _type = "Task" [ 1024.242048] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.253525] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1024.253785] env[68638]: DEBUG nova.compute.manager [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.254598] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073aa0c4-946f-4ce7-b542-ef992a5ef31c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.262105] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.262288] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.262468] env[68638]: DEBUG nova.network.neutron [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.297187] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523adeaf-5fd1-d213-e6a0-d33da6927b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.011033} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.297486] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.297761] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e7559933-fecc-4eb6-ba71-a295fba684e4/e7559933-fecc-4eb6-ba71-a295fba684e4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1024.298069] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41d1937f-0ea2-436b-a46e-8f3edfe9561f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.305906] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1024.305906] env[68638]: value = "task-2834114" [ 1024.305906] env[68638]: _type = "Task" [ 1024.305906] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.315827] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.401534] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.633237] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834112, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.935192} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.633547] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. 
[ 1024.634472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b62a290-8418-47f2-99c4-f08cf088472e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.662194] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1024.662697] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e94f567f-3cb5-4ef3-879c-101aa914aa15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.681719] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1024.681719] env[68638]: value = "task-2834115" [ 1024.681719] env[68638]: _type = "Task" [ 1024.681719] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.689674] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.735122] env[68638]: DEBUG nova.scheduler.client.report [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.770914] env[68638]: DEBUG nova.network.neutron [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1024.781395] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.817669] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834114, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.820296] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.820609] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.820837] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.821061] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.821227] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.821379] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.821597] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1024.821828] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.821980] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.822190] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.822435] env[68638]: DEBUG nova.virt.hardware [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.823763] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2363d8a1-2300-4580-a7b7-c34f61ee26eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.833700] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7797ae-0b8f-4c88-b8f3-8e638e301421 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.893704] env[68638]: DEBUG nova.network.neutron [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.096227] env[68638]: DEBUG nova.network.neutron [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.194376] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.241402] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.242305] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.250025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.684s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.250025] env[68638]: INFO nova.compute.claims [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.318302] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834114, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.401512] env[68638]: DEBUG oslo_concurrency.lockutils [req-26d0cd20-6f78-451b-99a9-1452a8f46d41 req-e1d17148-fa8d-4f8a-9044-cc5ce043439b service nova] Releasing lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.401920] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquired lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.402099] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.540919] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Successfully updated port: f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.599590] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.698235] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834115, 'name': ReconfigVM_Task, 'duration_secs': 0.606239} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.698235] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.698235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91fdbc4-b0ef-4630-8fb2-6b15ebf98837 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.733182] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8530084c-9b62-463f-89df-3efff9e9073f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.751575] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1025.751575] env[68638]: value = "task-2834116" [ 1025.751575] env[68638]: _type = "Task" [ 1025.751575] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.756502] env[68638]: DEBUG nova.compute.utils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.763644] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Not allocating networking since 'none' was specified. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1025.766063] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834116, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.817947] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834114, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.947706] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.030513] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.031527] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aff0e3-baf9-4c75-992c-a023823e8edb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.039828] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.040118] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a617395a-3d3e-45d7-bea9-7234495feb15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.043481] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.043638] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.043788] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1026.125816] env[68638]: DEBUG nova.network.neutron [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Updating instance_info_cache with network_info: [{"id": "afa7c854-c29e-429c-8b58-1c18417595ca", "address": "fa:16:3e:44:4e:eb", "network": {"id": "044efe76-33f3-4683-898b-36f6a62d43f0", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1261544082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de061db065b148bdae3776236a685cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafa7c854-c2", "ovs_interfaceid": "afa7c854-c29e-429c-8b58-1c18417595ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.144709] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.145094] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.145463] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore1] 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.145860] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fef0832-f9d3-4be3-aad9-06d82146ca53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.154907] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1026.154907] env[68638]: value = "task-2834118" [ 1026.154907] env[68638]: _type = "Task" [ 1026.154907] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.167208] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.252766] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Received event network-vif-plugged-f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.252984] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquiring lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.253281] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.253610] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.253812] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] No waiting events found dispatching network-vif-plugged-f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1026.253988] env[68638]: WARNING nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Received unexpected event network-vif-plugged-f99283c7-566e-4386-b66a-6295a6b67f68 for instance with vm_state building and task_state spawning. [ 1026.254165] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Received event network-changed-f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.254321] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Refreshing instance network info cache due to event network-changed-f99283c7-566e-4386-b66a-6295a6b67f68. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1026.254562] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquiring lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.261894] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.270030] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834116, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.330941] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834114, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.81038} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.336650] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e7559933-fecc-4eb6-ba71-a295fba684e4/e7559933-fecc-4eb6-ba71-a295fba684e4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.336650] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.339297] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c7cff82-07bc-46a3-89fb-2cabc01b6b5a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.347960] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1026.347960] env[68638]: value = "task-2834119" [ 1026.347960] env[68638]: _type = "Task" [ 1026.347960] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.359540] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834119, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.608095] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.630389] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Releasing lock "refresh_cache-dcaef2e3-eb23-4a0b-b617-2880084e03ab" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.630698] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance network_info: |[{"id": "afa7c854-c29e-429c-8b58-1c18417595ca", "address": "fa:16:3e:44:4e:eb", "network": {"id": "044efe76-33f3-4683-898b-36f6a62d43f0", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1261544082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de061db065b148bdae3776236a685cef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "209639b9-c313-4b35-86dc-dccd744d174a", "external-id": "nsx-vlan-transportzone-868", "segmentation_id": 868, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafa7c854-c2", "ovs_interfaceid": "afa7c854-c29e-429c-8b58-1c18417595ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.631118] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:4e:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '209639b9-c313-4b35-86dc-dccd744d174a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afa7c854-c29e-429c-8b58-1c18417595ca', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.639258] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Creating folder: Project (de061db065b148bdae3776236a685cef). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.639853] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04fb68bd-941e-4fff-8cb8-462228a8101f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.652055] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Created folder: Project (de061db065b148bdae3776236a685cef) in parent group-v569734. [ 1026.652259] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Creating folder: Instances. Parent ref: group-v569988. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.652503] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a32057d-462c-4df7-8cdb-2c2d485606d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.664042] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Created folder: Instances in parent group-v569988. [ 1026.664287] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.664477] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.665188] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3f82375-cce4-4c90-acf7-418853dff633 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.686724] env[68638]: DEBUG oslo_vmware.api [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256218} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.686724] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.686724] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.687034] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.690445] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.690445] env[68638]: value = "task-2834122" [ 1026.690445] env[68638]: _type = "Task" [ 1026.690445] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.703653] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834122, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.709110] env[68638]: INFO nova.scheduler.client.report [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted allocations for instance 0249ffb9-82ed-44db-bb20-e619eaa176dd [ 1026.732879] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fcc912e-2587-449d-8516-86efc90878c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.741080] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2ca99d-5497-4b5b-8f04-5d4c633ff0e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.777018] env[68638]: DEBUG nova.network.neutron [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Updating instance_info_cache with network_info: [{"id": "f99283c7-566e-4386-b66a-6295a6b67f68", "address": "fa:16:3e:75:83:c1", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99283c7-56", "ovs_interfaceid": "f99283c7-566e-4386-b66a-6295a6b67f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.782474] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b231b923-1068-4212-ba05-ba3f88e402a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.790930] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834116, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.794993] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34941fc9-9df2-4a6e-b120-4cd95d9ecab0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.811235] env[68638]: DEBUG nova.compute.provider_tree [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.859280] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078115} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.859587] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.860475] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c74625-89a2-4288-96d2-aeb6d823d070 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.883337] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] e7559933-fecc-4eb6-ba71-a295fba684e4/e7559933-fecc-4eb6-ba71-a295fba684e4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.883660] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a381ef70-94f5-4555-ba58-4c5e54ba626d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.905237] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1026.905237] env[68638]: value = "task-2834123" [ 1026.905237] env[68638]: _type = "Task" [ 1026.905237] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.916937] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834123, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.200711] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834122, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.213079] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.278439] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.284266] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834116, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.286948] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.286948] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance network_info: |[{"id": "f99283c7-566e-4386-b66a-6295a6b67f68", "address": "fa:16:3e:75:83:c1", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99283c7-56", "ovs_interfaceid": "f99283c7-566e-4386-b66a-6295a6b67f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1027.286948] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquired lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.286948] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Refreshing network info cache for port f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.287862] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:83:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f99283c7-566e-4386-b66a-6295a6b67f68', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.296763] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating folder: Project (35fdd5447a0546b7b0fe2ed9ea0efc73). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.300339] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f197dca0-52cb-4f22-a6b1-8a015aa5d77a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.312695] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.312927] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None 
req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.314837] env[68638]: DEBUG nova.virt.hardware [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.315527] env[68638]: DEBUG nova.scheduler.client.report [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.319789] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e312dc-e602-4ef1-99ad-7dc801f92f69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.324028] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created folder: Project (35fdd5447a0546b7b0fe2ed9ea0efc73) in parent group-v569734. [ 1027.324246] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating folder: Instances. Parent ref: group-v569991. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.325619] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28b9ceab-7399-481f-9418-2698f7650547 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.331132] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637f8c47-f044-4f99-8ce7-6c958874bd6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.341711] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created folder: Instances in parent group-v569991. [ 1027.342052] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.342932] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.351161] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d3376ed-b17d-48ab-b7e5-5aabb12c2523 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.368228] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.373854] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Creating folder: Project (af13d5d8bed54807ae01fb01a4e2397e). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.375015] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59692f9c-45c9-489d-8bd7-b3fdeb648527 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.380301] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.380301] env[68638]: value = "task-2834126" [ 1027.380301] env[68638]: _type = "Task" [ 1027.380301] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.387225] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Created folder: Project (af13d5d8bed54807ae01fb01a4e2397e) in parent group-v569734. 
[ 1027.387225] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Creating folder: Instances. Parent ref: group-v569993. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.387476] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5b7e0e5-4d0c-4336-ab3c-2ba07ff9d4e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.397020] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834126, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.400980] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Created folder: Instances in parent group-v569993. [ 1027.401284] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.401517] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.401760] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e17614e-01dc-4ddf-a482-66e7a5d366f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.425952] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834123, 'name': ReconfigVM_Task, 'duration_secs': 0.368857} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.427690] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Reconfigured VM instance instance-0000005c to attach disk [datastore2] e7559933-fecc-4eb6-ba71-a295fba684e4/e7559933-fecc-4eb6-ba71-a295fba684e4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.428436] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.428436] env[68638]: value = "task-2834129" [ 1027.428436] env[68638]: _type = "Task" [ 1027.428436] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.428645] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca6ae3a8-27e0-4888-947e-bb7f88eb9f57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.440893] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834129, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.442306] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1027.442306] env[68638]: value = "task-2834130" [ 1027.442306] env[68638]: _type = "Task" [ 1027.442306] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.453999] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834130, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.569571] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Updated VIF entry in instance network info cache for port f99283c7-566e-4386-b66a-6295a6b67f68. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.569962] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Updating instance_info_cache with network_info: [{"id": "f99283c7-566e-4386-b66a-6295a6b67f68", "address": "fa:16:3e:75:83:c1", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99283c7-56", "ovs_interfaceid": "f99283c7-566e-4386-b66a-6295a6b67f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.702719] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834122, 'name': CreateVM_Task, 'duration_secs': 0.908515} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.702847] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.703515] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.703731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.704069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1027.704340] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5256ede7-d673-47f3-aaf3-36c7a9a24983 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.709565] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1027.709565] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b01fd9-cb03-c1dc-aab1-721561e854a5" [ 1027.709565] env[68638]: _type = "Task" [ 1027.709565] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.720110] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b01fd9-cb03-c1dc-aab1-721561e854a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.782594] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834116, 'name': ReconfigVM_Task, 'duration_secs': 1.580989} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.782868] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.783133] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a24c7b91-52c8-413c-a303-9a4ae2d09274 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.789032] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1027.789032] env[68638]: value = "task-2834131" [ 1027.789032] env[68638]: _type = "Task" [ 1027.789032] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.796784] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.824690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.825555] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.828528] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.156s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.830184] env[68638]: INFO nova.compute.claims [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.892579] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834126, 'name': CreateVM_Task, 'duration_secs': 0.383915} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.892760] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.893416] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.939945] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834129, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.951509] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834130, 'name': Rename_Task, 'duration_secs': 0.167752} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.951804] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.952070] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a762aec-6637-45fd-aa43-18db7374a04d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.957243] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1027.957243] env[68638]: value = "task-2834132" [ 1027.957243] env[68638]: _type = "Task" [ 1027.957243] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.965422] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834132, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.073098] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Releasing lock "refresh_cache-4765bf70-1a72-4102-b5d3-ccedb7c383ea" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.073405] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-vif-unplugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1028.073616] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.073831] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.073996] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.074188] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] No waiting events found dispatching network-vif-unplugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.074361] env[68638]: WARNING nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received unexpected event network-vif-unplugged-d0023f1c-323c-4f1c-a82c-45ad56565341 for instance with vm_state shelved and task_state shelving_offloading. [ 1028.074626] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1028.074795] env[68638]: DEBUG nova.compute.manager [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing instance network info cache due to event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1028.074985] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.075543] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.075924] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1028.220477] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b01fd9-cb03-c1dc-aab1-721561e854a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010307} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.220477] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.220717] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.220938] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.221100] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.221285] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 
tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.221566] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.221867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.222106] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdbaffbf-8107-46ae-92d3-ac4aa0edbc35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.223889] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2d6f256-5e0e-4d06-93a7-d492527e8d01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.230331] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1028.230331] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527a3f2a-6f75-4ae4-4d93-9ca2e8fc2390" [ 1028.230331] env[68638]: _type = "Task" [ 1028.230331] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.234316] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.234508] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.235476] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22c394cd-1c8e-4110-94c4-00f71bb8d2e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.240231] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527a3f2a-6f75-4ae4-4d93-9ca2e8fc2390, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.243758] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1028.243758] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525fb39e-0900-caee-37ac-4ff1c6434b10" [ 1028.243758] env[68638]: _type = "Task" [ 1028.243758] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.251542] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525fb39e-0900-caee-37ac-4ff1c6434b10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.298677] env[68638]: DEBUG oslo_vmware.api [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834131, 'name': PowerOnVM_Task, 'duration_secs': 0.427328} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.298970] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.301879] env[68638]: DEBUG nova.compute.manager [None req-1a1ed27e-69f5-4a21-9a37-79bc4d1f0bd3 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.302638] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185e5092-e62c-4b4b-b791-fc1eaa33e54e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.334702] env[68638]: DEBUG nova.compute.utils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.337988] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.338137] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.376344] env[68638]: DEBUG nova.policy [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b140aa82f044f108521ab8c0d28c0a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3e5757d1f74492481048df4a29032ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1028.441940] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834129, 'name': CreateVM_Task, 'duration_secs': 0.526267} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.442190] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.442600] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.466262] env[68638]: DEBUG oslo_vmware.api [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834132, 'name': PowerOnVM_Task, 'duration_secs': 0.475964} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.466592] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.466809] env[68638]: INFO nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Took 8.67 seconds to spawn the instance on the hypervisor. 
[ 1028.467031] env[68638]: DEBUG nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.467755] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65eae7b-37b4-463e-b2d0-1b2df04ef79a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.638885] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.639185] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.639390] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.639575] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.639837] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.642123] env[68638]: INFO nova.compute.manager [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Terminating instance [ 1028.729707] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Successfully created port: 14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1028.743115] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527a3f2a-6f75-4ae4-4d93-9ca2e8fc2390, 'name': SearchDatastore_Task, 'duration_secs': 0.023715} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.745805] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.746396] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.746662] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.746980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.747293] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.750453] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1debcf89-3930-415b-b58c-ddcbbbdd5c8c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.755855] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1028.755855] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5246207d-5ea3-430d-e454-985391621a2f" [ 1028.755855] env[68638]: _type = "Task" [ 1028.755855] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.760344] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525fb39e-0900-caee-37ac-4ff1c6434b10, 'name': SearchDatastore_Task, 'duration_secs': 0.010086} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.764053] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6503a2ef-fdeb-4986-a417-6b7f227fece4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.771313] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5246207d-5ea3-430d-e454-985391621a2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010191} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.772574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.772953] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.772953] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.773248] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1028.773248] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52055319-1017-c635-d18f-18a1a0630bed" [ 1028.773248] env[68638]: _type = "Task" [ 1028.773248] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.782665] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52055319-1017-c635-d18f-18a1a0630bed, 'name': SearchDatastore_Task, 'duration_secs': 0.008377} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.782825] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.783091] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] dcaef2e3-eb23-4a0b-b617-2880084e03ab/dcaef2e3-eb23-4a0b-b617-2880084e03ab.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.783699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.783835] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.784074] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e73e4cbd-9410-4774-bc77-79c99baf55a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.785903] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e942111-4c3f-4a59-afbc-d45d002df13f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.792436] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1028.792436] env[68638]: value = "task-2834133" [ 1028.792436] env[68638]: _type = "Task" [ 1028.792436] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.796222] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.796501] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.797707] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc2ac458-cc27-41f6-ae63-03ae3ef5ed5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.803021] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.805748] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1028.805748] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e11b79-97bf-f8ad-8736-66a3871aaa7c" [ 1028.805748] env[68638]: _type = "Task" [ 1028.805748] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.816636] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e11b79-97bf-f8ad-8736-66a3871aaa7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.844573] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.850954] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updated VIF entry in instance network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.852901] env[68638]: DEBUG nova.network.neutron [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd0023f1c-32", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.988793] env[68638]: INFO nova.compute.manager [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Took 38.14 seconds to build instance. [ 1029.016883] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.145791] env[68638]: DEBUG nova.compute.manager [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Start destroying the instance on the hypervisor. 
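The instance_info_cache entry above is plain nested data. A small, self-contained sketch (operating on a pared-down literal copy of that entry, not on live Nova network objects) of pulling the port's fixed and floating addresses out of it:

```python
# Pared-down copy of the cache entry above; only the fields used below.
network_info = [{
    "id": "d0023f1c-323c-4f1c-a82c-45ad56565341",
    "address": "fa:16:3e:33:9f:b3",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.3",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.248",
                                  "type": "floating"}],
            }],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"], ip["address"], floats)
# d0023f1c-323c-4f1c-a82c-45ad56565341 192.168.128.3 ['10.180.180.248']
```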
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1029.146066] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1029.147204] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9749f0-d13d-4b9d-a420-0304b9b75919 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.157726] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1029.157894] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28c404ca-506d-46a7-9c7e-a1e277a54117 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.166658] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1029.166658] env[68638]: value = "task-2834134" [ 1029.166658] env[68638]: _type = "Task" [ 1029.166658] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.180204] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834134, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.213125] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96454474-ffba-4294-87ec-eeb2b99fcf1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.221453] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c24232-9443-46e2-9e3a-0b749c9b39d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.257648] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75046197-546c-4cc4-a8ea-42b25382be9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.267111] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ddf756-0f99-4c2f-97d2-42015da2d215 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.287265] env[68638]: DEBUG nova.compute.provider_tree [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.304098] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834133, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.315098] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e11b79-97bf-f8ad-8736-66a3871aaa7c, 'name': SearchDatastore_Task, 'duration_secs': 0.008094} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.315988] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1d1018f-931c-4b65-b2bd-7915b205b44c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.322344] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1029.322344] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52edc367-d7ab-0167-1716-09597f601e14" [ 1029.322344] env[68638]: _type = "Task" [ 1029.322344] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.331871] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52edc367-d7ab-0167-1716-09597f601e14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.353171] env[68638]: DEBUG oslo_concurrency.lockutils [req-77a74793-1906-43de-a483-fadc172ce53c req-313d2cf7-b12b-40aa-abe1-15166c9c5354 service nova] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.491844] env[68638]: DEBUG oslo_concurrency.lockutils [None req-23159756-27fa-4055-8538-3414228ce810 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.652s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.678486] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834134, 'name': PowerOffVM_Task, 'duration_secs': 0.267105} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.678766] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1029.678933] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1029.679201] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce38f542-5633-45e4-a4ff-fd84ce2a6a01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.762533] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.762806] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.762930] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleting the datastore file [datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.763221] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52e6fb35-34ea-496b-a514-6e5d2352cd20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.769501] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1029.769501] env[68638]: value = "task-2834136" [ 1029.769501] env[68638]: _type = "Task" [ 1029.769501] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.777708] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.790256] env[68638]: DEBUG nova.scheduler.client.report [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.804713] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513699} completed successfully. 
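The inventory dict reported just above for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff is what Placement schedules against: per resource class, roughly (total - reserved) * allocation_ratio is the schedulable capacity, and max_unit caps any single allocation. A quick check with the logged numbers:

```python
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 169,
                'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: schedulable=192, per-allocation cap=16
# MEMORY_MB: schedulable=196078, per-allocation cap=65530
# DISK_GB: schedulable=400, per-allocation cap=169
```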
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.805696] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] dcaef2e3-eb23-4a0b-b617-2880084e03ab/dcaef2e3-eb23-4a0b-b617-2880084e03ab.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.805931] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.806217] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1364a328-3e9e-42d7-9abc-231ba82e3c41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.813198] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1029.813198] env[68638]: value = "task-2834137" [ 1029.813198] env[68638]: _type = "Task" [ 1029.813198] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.821774] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.832079] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52edc367-d7ab-0167-1716-09597f601e14, 'name': SearchDatastore_Task, 'duration_secs': 0.010208} completed successfully. 
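The copy-then-extend sequence logged above (CopyVirtualDisk_Task from the image cache into the instance directory, then ExtendVirtualDisk_Task to 1048576 KB, i.e. the flavor's 1 GiB root size) can be sketched against the oslo.vmware session API; the datacenter reference and VMDK paths below are placeholders:

```python
def copy_and_extend(session, dc_ref, src_vmdk, dst_vmdk, new_capacity_kb):
    """Clone the cached image VMDK into the instance directory, then grow
    the root disk to the requested size (1048576 KB for m1.nano above)."""
    disk_mgr = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src_vmdk, sourceDatacenter=dc_ref,
        destName=dst_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=dst_vmdk, datacenter=dc_ref,
        newCapacityKb=new_capacity_kb, eagerZero=False)
    session.wait_for_task(extend_task)
```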
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.832360] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.832638] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.832950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.833227] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.833485] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1f82949-7369-43d9-b908-2e7468dc15c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.835537] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e39cce9-40d7-43e5-be16-bc276a3c2531 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.841424] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1029.841424] env[68638]: value = "task-2834138" [ 1029.841424] env[68638]: _type = "Task" [ 1029.841424] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.845900] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.846122] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.847232] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-096989fb-d0b5-4d53-b006-a6b533b99db9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.853647] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834138, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.854789] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.859705] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1029.859705] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d37de6-57c0-032a-e1f3-adc912aa6e7b" [ 1029.859705] env[68638]: _type = "Task" [ 1029.859705] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.867957] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d37de6-57c0-032a-e1f3-adc912aa6e7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.894451] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.894703] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.894873] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.895069] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.895240] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.895454] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.895723] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.895886] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1029.896081] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 
tempest-ImagesTestJSON-774550192-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.896257] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.896470] env[68638]: DEBUG nova.virt.hardware [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.897417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b81e5b1-b238-44af-ae14-116937be88cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.906022] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32581df3-5a58-40ed-96c8-f42434db75b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.202494] env[68638]: DEBUG nova.compute.manager [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Received event network-vif-plugged-14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1030.202743] env[68638]: DEBUG oslo_concurrency.lockutils [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] Acquiring lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.202983] env[68638]: DEBUG oslo_concurrency.lockutils [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.203353] env[68638]: DEBUG oslo_concurrency.lockutils [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.203430] env[68638]: DEBUG nova.compute.manager [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] No waiting events found dispatching network-vif-plugged-14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1030.203665] env[68638]: WARNING nova.compute.manager [req-d06749f4-daf2-4fbb-9fb4-93af70d65cab req-2ac98be2-196c-42ec-98ae-068ce6e9fd1e service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Received 
unexpected event network-vif-plugged-14400668-d5ea-4861-8521-351f3d71704a for instance with vm_state building and task_state spawning. [ 1030.220039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "e7559933-fecc-4eb6-ba71-a295fba684e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.220447] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.220721] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.220993] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.221299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.223990] env[68638]: INFO nova.compute.manager [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Terminating instance [ 1030.281713] env[68638]: DEBUG oslo_vmware.api [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145035} completed successfully. 
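The nova.virt.hardware lines a little above reduce the m1.nano flavor (1 vCPU, limits of 65536 sockets, cores and threads) to the single topology VirtCPUTopology(cores=1,sockets=1,threads=1). A deliberately simplified illustration of that reduction (not Nova's actual _get_possible_cpu_topologies, just the counting argument):

```python
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Enumerate (sockets, cores, threads) whose product is the vCPU count
    and which stay within the per-dimension limits."""
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1, 65536, 65536, 65536))   # [(1, 1, 1)]
```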
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.282096] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1030.282373] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1030.283036] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1030.283036] env[68638]: INFO nova.compute.manager [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1030.283231] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1030.283476] env[68638]: DEBUG nova.compute.manager [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1030.283747] env[68638]: DEBUG nova.network.neutron [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1030.299277] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.302021] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Start building networks asynchronously for instance. 
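The teardown of instance 4c954bb4-6291-47d5-a65c-0ad92a0fd193 above runs power-off, unregister, then datastore file deletion before the network is deallocated. A rough sketch of that order of operations against the oslo.vmware session; the VM and datacenter references and the datastore path are placeholders:

```python
def destroy_backing(session, vm_ref, dc_ref, ds_path):
    """Mirror the logged order: power off, unregister, delete the files,
    e.g. ds_path = '[datastore1] 4c954bb4-6291-47d5-a65c-0ad92a0fd193'."""
    vim = session.vim

    # PowerOffVM_Task is a vSphere task; poll it like any other.
    session.wait_for_task(
        session.invoke_api(vim, 'PowerOffVM_Task', vm_ref))

    # UnregisterVM is a plain method call, not a task, which is why no
    # polling lines appear for it in the log.
    session.invoke_api(vim, 'UnregisterVM', vm_ref)

    # Deleting the instance directory goes through the FileManager and is
    # a task again (the DeleteDatastoreFile_Task entries above).
    delete_task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', vim.service_content.fileManager,
        name=ds_path, datacenter=dc_ref)
    session.wait_for_task(delete_task)
```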
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1030.302779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.232s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.303056] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.305501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.985s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.305726] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.307570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.068s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.307752] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.309747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.335s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.309985] env[68638]: DEBUG nova.objects.instance [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lazy-loading 'resources' on Instance uuid 32d43fce-837d-41d9-be11-a0c3cdb1694b {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.325767] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 
tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067296} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.326381] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.327050] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e2e203-3000-4467-91a5-8354b36a648c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.334149] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Successfully updated port: 14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.362136] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] dcaef2e3-eb23-4a0b-b617-2880084e03ab/dcaef2e3-eb23-4a0b-b617-2880084e03ab.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.363657] env[68638]: INFO nova.scheduler.client.report [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocations for instance 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b [ 1030.365969] env[68638]: INFO nova.scheduler.client.report [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Deleted allocations for instance f0598d8d-09a9-44ce-b4d7-cb8830a84b94 [ 1030.371149] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40b4c913-8be6-408b-87a3-23a82fe5c6fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.392782] env[68638]: INFO nova.scheduler.client.report [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted allocations for instance 9975e756-b571-4e70-ba50-a6001d0b064c [ 1030.408900] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473816} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.413724] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.413965] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.414302] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1030.414302] env[68638]: value = "task-2834139" [ 1030.414302] env[68638]: _type = "Task" [ 1030.414302] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.414506] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d37de6-57c0-032a-e1f3-adc912aa6e7b, 'name': SearchDatastore_Task, 'duration_secs': 0.008902} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.414999] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39902db0-f7bf-45a7-907f-5e2c07b0d51a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.421607] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00c162fc-1e28-4c52-b530-8e643c1cc67b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.431117] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.432646] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1030.432646] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d36168-2164-5da3-e946-27eab3399ee6" [ 1030.432646] env[68638]: _type = "Task" [ 1030.432646] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.432900] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1030.432900] env[68638]: value = "task-2834140" [ 1030.432900] env[68638]: _type = "Task" [ 1030.432900] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.450232] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834140, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.450484] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d36168-2164-5da3-e946-27eab3399ee6, 'name': SearchDatastore_Task, 'duration_secs': 0.013829} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.451446] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.451728] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.452027] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b85afa9b-5230-4b55-9eb2-1e22272bfb6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.459185] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1030.459185] env[68638]: value = "task-2834141" [ 1030.459185] env[68638]: _type = "Task" [ 1030.459185] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.469942] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.729182] env[68638]: DEBUG nova.compute.manager [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1030.729734] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.730940] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd6bf3c-b20a-4617-abca-ffe4fec2a68d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.742580] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.743447] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8d2bcc3-c808-48f8-90c6-60fd110caf0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.752824] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1030.752824] env[68638]: value = "task-2834142" [ 1030.752824] env[68638]: _type = "Task" [ 1030.752824] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.770731] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.815942] env[68638]: DEBUG nova.compute.utils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.821766] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Not allocating networking since 'none' was specified. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1030.838700] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.838700] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.838700] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.907291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a26246eb-0a38-40b3-8728-98154cd7c3e7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "14c1dba5-98cb-4ebd-8e76-60b3f74cca4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.602s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.911432] env[68638]: DEBUG oslo_concurrency.lockutils [None req-daeb971d-041f-4f0e-bf0f-a7c333c1e8ed tempest-InstanceActionsTestJSON-966218912 tempest-InstanceActionsTestJSON-966218912-project-member] Lock "f0598d8d-09a9-44ce-b4d7-cb8830a84b94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.301s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.913101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-69bb20e5-7cb4-4d03-9b8c-b45fefe84e9c tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "9975e756-b571-4e70-ba50-a6001d0b064c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.955s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.929778] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834139, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.933903] env[68638]: INFO nova.compute.manager [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Rescuing [ 1030.934256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.934455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.934644] env[68638]: DEBUG nova.network.neutron [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.951575] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834140, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067065} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.951992] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.953049] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a285bb90-5df2-4454-8ccf-c4d0137669fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.983184] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.986218] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a41a7bcb-6365-419e-aab7-c746970929fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.008218] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475641} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.009617] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.009843] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.010169] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1031.010169] env[68638]: value = "task-2834143" [ 1031.010169] env[68638]: _type = "Task" [ 1031.010169] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.010359] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95a56f19-dda0-41e1-b98a-b374d4aa6527 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.023293] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834143, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.024607] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1031.024607] env[68638]: value = "task-2834144" [ 1031.024607] env[68638]: _type = "Task" [ 1031.024607] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.031992] env[68638]: DEBUG nova.compute.manager [req-3d0e1963-e286-4d55-8a9e-cbb0b2fbc20a req-19a10016-8133-4389-a072-f5e662a279a1 service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Received event network-vif-deleted-fb14533d-1dc9-4440-a62d-ab3ca16bc7f1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.032215] env[68638]: INFO nova.compute.manager [req-3d0e1963-e286-4d55-8a9e-cbb0b2fbc20a req-19a10016-8133-4389-a072-f5e662a279a1 service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Neutron deleted interface fb14533d-1dc9-4440-a62d-ab3ca16bc7f1; detaching it from the instance and deleting it from the info cache [ 1031.032413] env[68638]: DEBUG nova.network.neutron [req-3d0e1963-e286-4d55-8a9e-cbb0b2fbc20a req-19a10016-8133-4389-a072-f5e662a279a1 service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.039216] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834144, 'name': ExtendVirtualDisk_Task} progress is 0%. 
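The wait_for_task / _poll_task entries above are oslo.vmware driving asynchronous vCenter tasks (ReconfigVM_Task, ExtendVirtualDisk_Task and so on) to completion by polling their progress. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint and credentials and pre-built vm_ref/spec arguments; the constructor values are illustrative, only the invoke_api / wait_for_task calls come from the library:

    # Sketch of the oslo.vmware task-wait pattern behind the wait_for_task /
    # _poll_task entries above; host and credentials are placeholders, not
    # values taken from this log.
    from oslo_vmware import api

    def reconfigure_and_wait(session, vm_ref, spec):
        """Start ReconfigVM_Task and block until vCenter reports completion."""
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
        # wait_for_task polls the task at task_poll_interval and emits the
        # "progress is N%" DEBUG lines seen above until the task finishes.
        return session.wait_for_task(task)

    session = api.VMwareAPISession(
        'vc.example.test',              # placeholder vCenter endpoint
        'administrator@vsphere.local',  # placeholder username
        'secret',                       # placeholder password
        10,                             # api_retry_count
        0.5)                            # task_poll_interval, in seconds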
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.234339] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdfe0b0-e520-4b30-804c-60f94b01ee63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.244117] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb627863-485c-45aa-a78e-65f9a1bf2668 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.296695] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a36a21-2bd3-4665-af2c-9abd429521fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.299511] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834142, 'name': PowerOffVM_Task, 'duration_secs': 0.26506} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.300157] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.301031] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.301031] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-363ce9d6-338c-46d0-97fa-ddfaf5fb9e07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.306015] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ad6dfa-6ece-46b0-a25d-71235af36fc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.320588] env[68638]: DEBUG nova.compute.provider_tree [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.322610] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1031.374216] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.377190] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.377437] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.377572] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore2] e7559933-fecc-4eb6-ba71-a295fba684e4 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.377833] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901c8675-0897-4130-bc44-e68b632c1ec4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.383674] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1031.383674] env[68638]: value = "task-2834146" [ 1031.383674] env[68638]: _type = "Task" [ 1031.383674] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.391210] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834146, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.425798] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834139, 'name': ReconfigVM_Task, 'duration_secs': 0.713565} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.426266] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Reconfigured VM instance instance-0000005d to attach disk [datastore2] dcaef2e3-eb23-4a0b-b617-2880084e03ab/dcaef2e3-eb23-4a0b-b617-2880084e03ab.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.426793] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23d75057-1d44-4930-9eeb-b2ce46d8e597 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.434254] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1031.434254] env[68638]: value = "task-2834147" [ 1031.434254] env[68638]: _type = "Task" [ 1031.434254] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.445258] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834147, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.453135] env[68638]: DEBUG nova.network.neutron [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.523058] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834143, 'name': ReconfigVM_Task, 'duration_secs': 0.293364} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.523360] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.524099] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-571e82d9-3340-4547-9e2c-6a801cc43153 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.535035] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08572} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.538729] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.538729] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1031.538729] env[68638]: value = "task-2834148" [ 1031.538729] env[68638]: _type = "Task" [ 1031.538729] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.539386] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0614577-185b-4296-87f2-8ef42a9678e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.551072] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e399c35-0946-48fe-9978-bcdb77e384dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.572206] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.580969] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef964f22-2d4f-405d-a7d2-dd6b6a420e66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.598601] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834148, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.599666] env[68638]: DEBUG nova.network.neutron [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Updating instance_info_cache with network_info: [{"id": "14400668-d5ea-4861-8521-351f3d71704a", "address": "fa:16:3e:4d:b2:4c", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14400668-d5", "ovs_interfaceid": "14400668-d5ea-4861-8521-351f3d71704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.604383] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b59a40e-7103-4e17-9062-721f3e508c77 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.625182] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1031.625182] env[68638]: value = "task-2834149" [ 1031.625182] env[68638]: _type = "Task" [ 1031.625182] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.634836] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834149, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.646680] env[68638]: DEBUG nova.compute.manager [req-3d0e1963-e286-4d55-8a9e-cbb0b2fbc20a req-19a10016-8133-4389-a072-f5e662a279a1 service nova] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Detach interface failed, port_id=fb14533d-1dc9-4440-a62d-ab3ca16bc7f1, reason: Instance 4c954bb4-6291-47d5-a65c-0ad92a0fd193 could not be found. 
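The instance_info_cache payload above is a list of VIF dictionaries. As a plain-Python illustration (no Nova imports), this pulls the usual fields out of a trimmed copy of the cached entry for port 14400668-d5ea-4861-8521-351f3d71704a:

    # Illustrative only: summarize a cached network_info entry such as the
    # one above (trimmed to the fields used here).
    vif = {
        "id": "14400668-d5ea-4861-8521-351f3d71704a",
        "address": "fa:16:3e:4d:b2:4c",
        "devname": "tap14400668-d5",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.11", "type": "fixed"}],
            }],
        },
        "details": {"segmentation_id": 292},
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips,
          vif["details"]["segmentation_id"])
    # tap14400668-d5 fa:16:3e:4d:b2:4c ['192.168.128.11'] 292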
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1031.829936] env[68638]: DEBUG nova.scheduler.client.report [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1031.892481] env[68638]: DEBUG nova.network.neutron [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [{"id": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "address": "fa:16:3e:a8:3e:ae", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ebdba3-fc", "ovs_interfaceid": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.897295] env[68638]: DEBUG oslo_vmware.api [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.240426} completed successfully. 
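The inventory report above is what placement uses to size provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff: allocatable capacity per resource class is (total - reserved) * allocation_ratio. Checking those numbers with plain arithmetic (not Nova code):

    # The same numbers as the report above, checked by hand.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400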
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.897831] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.898188] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.898497] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.898799] env[68638]: INFO nova.compute.manager [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1031.899211] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1031.899557] env[68638]: DEBUG nova.compute.manager [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1031.899801] env[68638]: DEBUG nova.network.neutron [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.946528] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834147, 'name': Rename_Task, 'duration_secs': 0.467632} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.946942] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.947322] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4047f387-7dd4-48cf-a2df-96472fdb2c73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.954246] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1031.954246] env[68638]: value = "task-2834150" [ 1031.954246] env[68638]: _type = "Task" [ 1031.954246] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.958253] env[68638]: INFO nova.compute.manager [-] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Took 1.67 seconds to deallocate network for instance. [ 1031.968442] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.054429] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834148, 'name': Rename_Task, 'duration_secs': 0.148127} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.055042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.055874] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba8830cf-c66e-45b1-b329-d603f2bb93e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.065643] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1032.065643] env[68638]: value = "task-2834151" [ 1032.065643] env[68638]: _type = "Task" [ 1032.065643] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.078269] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.103228] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.103228] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Instance network_info: |[{"id": "14400668-d5ea-4861-8521-351f3d71704a", "address": "fa:16:3e:4d:b2:4c", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14400668-d5", "ovs_interfaceid": "14400668-d5ea-4861-8521-351f3d71704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.103746] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:b2:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14400668-d5ea-4861-8521-351f3d71704a', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.112575] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.113057] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.113695] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca3f4349-45e4-4722-a40c-54989323377d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.147152] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834149, 'name': ReconfigVM_Task, 'duration_secs': 0.279295} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.148946] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.149795] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.149795] env[68638]: value = "task-2834152" [ 1032.149795] env[68638]: _type = "Task" [ 1032.149795] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.150356] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72f23de1-6bf0-4d82-b453-fa3571390091 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.163111] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834152, 'name': CreateVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.163111] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1032.163111] env[68638]: value = "task-2834153" [ 1032.163111] env[68638]: _type = "Task" [ 1032.163111] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.172669] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834153, 'name': Rename_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.336736] env[68638]: DEBUG nova.compute.manager [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Received event network-changed-14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1032.337446] env[68638]: DEBUG nova.compute.manager [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Refreshing instance network info cache due to event network-changed-14400668-d5ea-4861-8521-351f3d71704a. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1032.337446] env[68638]: DEBUG oslo_concurrency.lockutils [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] Acquiring lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.337446] env[68638]: DEBUG oslo_concurrency.lockutils [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] Acquired lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.337565] env[68638]: DEBUG nova.network.neutron [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Refreshing network info cache for port 14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.339544] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.030s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.343028] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1032.345709] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.312s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.346109] env[68638]: DEBUG nova.objects.instance [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lazy-loading 'resources' on Instance uuid e9b8e5ad-4d47-48ad-995f-b28d0230df0f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.368303] env[68638]: INFO nova.scheduler.client.report [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted allocations for instance 32d43fce-837d-41d9-be11-a0c3cdb1694b [ 1032.384909] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.385251] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.385365] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.386185] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.386427] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.386890] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 
tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.386890] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.387607] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1032.387607] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.387607] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.387768] env[68638]: DEBUG nova.virt.hardware [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.388976] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332ca11e-4c2a-4bd5-9257-805dd488616f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.393304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.401103] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8702ef3f-2753-4d72-a362-45b7d9b86460 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.419962] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.427173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Creating folder: Project (03d2e8b57a134e8396115cea962f9b53). 
Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.430870] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2392b919-5053-44f1-81e9-f7e9a91ae7d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.444691] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Created folder: Project (03d2e8b57a134e8396115cea962f9b53) in parent group-v569734. [ 1032.444936] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Creating folder: Instances. Parent ref: group-v569998. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1032.445482] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b5c1507-ea5a-4c16-8e53-c469be1343f2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.455348] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Created folder: Instances in parent group-v569998. [ 1032.455643] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.460384] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.460642] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef090bfc-a463-43eb-a29c-94050922259e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.478364] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.482828] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834150, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.484471] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.484471] env[68638]: value = "task-2834156" [ 1032.484471] env[68638]: _type = "Task" [ 1032.484471] env[68638]: } to complete. 
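The nova.virt.hardware lines a little earlier enumerate every sockets:cores:threads split of the flavor's vCPU count that fits the limits; with vcpus=1 and limits of 65536 per dimension the only candidate is 1:1:1. A rough stand-alone sketch of that enumeration, not the actual nova.virt.hardware implementation, just the same idea:

    # Enumerate sockets * cores * threads factorizations of vcpus, subject to
    # per-dimension maximums (65536 each in the log above).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"
    print(possible_topologies(4))   # [(1, 1, 4), (1, 2, 2), (1, 4, 1), ...]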
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.493142] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834156, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.580025] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834151, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.656504] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.656853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.679543] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834153, 'name': Rename_Task, 'duration_secs': 0.198764} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.682933] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.683562] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834152, 'name': CreateVM_Task, 'duration_secs': 0.401454} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.683948] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e1d47128-7bd5-45ff-8c53-8893f9f362d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.685357] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.686048] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.686222] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.686566] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1032.687165] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69aa525-79a3-4e87-8b09-e669d1c2539a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.691947] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1032.691947] env[68638]: value = "task-2834157" [ 1032.691947] env[68638]: _type = "Task" [ 1032.691947] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.693779] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1032.693779] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e96bf3-9f0c-d35e-9e91-2e7ee0a24d69" [ 1032.693779] env[68638]: _type = "Task" [ 1032.693779] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.709146] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834157, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.713131] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e96bf3-9f0c-d35e-9e91-2e7ee0a24d69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.718154] env[68638]: DEBUG nova.network.neutron [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.880293] env[68638]: DEBUG oslo_concurrency.lockutils [None req-41fdbe56-3692-4dc0-96db-0f8ac0aac5c1 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "32d43fce-837d-41d9-be11-a0c3cdb1694b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.650s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.969791] env[68638]: DEBUG oslo_vmware.api [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834150, 'name': PowerOnVM_Task, 'duration_secs': 0.63705} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.970070] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1032.970280] env[68638]: INFO nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Took 10.71 seconds to spawn the instance on the hypervisor. [ 1032.970489] env[68638]: DEBUG nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.971277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e054f1e-febe-4724-b7dc-f7677c96332d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.997734] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834156, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.074869] env[68638]: DEBUG oslo_vmware.api [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834151, 'name': PowerOnVM_Task, 'duration_secs': 0.536313} completed successfully. 
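Most of the Acquiring lock / Acquired lock / released lines in this log come from oslo.concurrency's lockutils, which Nova uses both as a context manager and as a decorator. A small sketch of the two forms, with lock names patterned after the refresh_cache and compute_resources locks above; treat the arguments as illustrative:

    from oslo_concurrency import lockutils

    # Context-manager form: emits the Acquiring/Acquired/Releasing DEBUG
    # lines above when oslo logging is enabled at DEBUG level.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here

    # Decorator form: serializes callers on the named lock; external=True
    # would additionally take a file lock shared across processes.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource tracker bookkeeping goes here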
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.077771] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.078105] env[68638]: INFO nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1033.078327] env[68638]: DEBUG nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.079392] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfcc267-2745-4235-8464-483728514e5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.095304] env[68638]: DEBUG nova.network.neutron [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Updated VIF entry in instance network info cache for port 14400668-d5ea-4861-8521-351f3d71704a. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.095304] env[68638]: DEBUG nova.network.neutron [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Updating instance_info_cache with network_info: [{"id": "14400668-d5ea-4861-8521-351f3d71704a", "address": "fa:16:3e:4d:b2:4c", "network": {"id": "104a324f-fd5a-4c74-9a7a-6126392ea10c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1310127541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3e5757d1f74492481048df4a29032ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14400668-d5", "ovs_interfaceid": "14400668-d5ea-4861-8521-351f3d71704a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.146160] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70c9ce4-0ed7-4067-9237-bd4e838d0fd1 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.153818] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658468e7-5223-4107-8e6a-717b4ca3d5e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.159787] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1033.187012] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d7143e-72ab-4c67-b797-1ab3dc715488 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.199135] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3b6563-a920-4bf1-9664-bf55078825c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.209223] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834157, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.217331] env[68638]: DEBUG nova.compute.provider_tree [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.222052] env[68638]: INFO nova.compute.manager [-] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Took 1.32 seconds to deallocate network for instance. [ 1033.222334] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e96bf3-9f0c-d35e-9e91-2e7ee0a24d69, 'name': SearchDatastore_Task, 'duration_secs': 0.01859} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.224010] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.224241] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.224472] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.224619] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.224795] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.227333] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f29800-e214-46ac-93c1-35acd22e2b4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.236890] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.237089] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.237999] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-709110d4-fcb0-4595-94e6-2263266045dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.243190] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1033.243190] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525cf5f9-f3c4-f33b-6c9e-678f2b55a49f" [ 1033.243190] env[68638]: _type = "Task" [ 1033.243190] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.251159] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525cf5f9-f3c4-f33b-6c9e-678f2b55a49f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.496170] env[68638]: INFO nova.compute.manager [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Took 39.86 seconds to build instance. [ 1033.501105] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834156, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.599242] env[68638]: DEBUG oslo_concurrency.lockutils [req-7125da27-6eda-4786-aea7-e42764ea3d7e req-717b6367-aee3-4505-ad1b-d495530de643 service nova] Releasing lock "refresh_cache-71ec29a8-5e2f-4ccd-9c22-d9721c77622e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.603478] env[68638]: INFO nova.compute.manager [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Took 31.82 seconds to build instance. [ 1033.704855] env[68638]: DEBUG oslo_vmware.api [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834157, 'name': PowerOnVM_Task, 'duration_secs': 0.983098} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.705884] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.706180] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.706384] env[68638]: INFO nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Took 6.43 seconds to spawn the instance on the hypervisor. [ 1033.706559] env[68638]: DEBUG nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.707346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f09a0b5-17c4-4658-978f-c14b7239a201 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.724109] env[68638]: DEBUG nova.scheduler.client.report [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.730275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.755019] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525cf5f9-f3c4-f33b-6c9e-678f2b55a49f, 'name': SearchDatastore_Task, 'duration_secs': 0.009307} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.755870] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e03e71-b9bd-4666-a6f3-3d381e2367d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.762007] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1033.762007] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bc7129-1ff6-9ef8-718d-4f6fd25f7e4f" [ 1033.762007] env[68638]: _type = "Task" [ 1033.762007] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.770216] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bc7129-1ff6-9ef8-718d-4f6fd25f7e4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.956457] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.956933] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b6f5b72-80de-4f93-a6a8-f5498661bd0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.965965] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1033.965965] env[68638]: value = "task-2834158" [ 1033.965965] env[68638]: _type = "Task" [ 1033.965965] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.979476] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.999506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6030b202-e46e-4011-aaac-4d8351f6b8a7 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.372s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.999712] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834156, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.105718] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3251611e-6d7e-494d-b4bd-f9976aecf418 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.332s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.172479] env[68638]: INFO nova.compute.manager [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Rebuilding instance [ 1034.208684] env[68638]: DEBUG nova.compute.manager [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.209592] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b410bd46-2654-4179-befd-5d85628f9dc3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.223772] env[68638]: INFO nova.compute.manager [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Took 31.00 seconds to build instance. [ 1034.229319] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.231029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.707s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.232927] env[68638]: INFO nova.compute.claims [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.253338] env[68638]: INFO nova.scheduler.client.report [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Deleted allocations for instance e9b8e5ad-4d47-48ad-995f-b28d0230df0f [ 1034.276128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.276881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.276881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.277116] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.277290] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.279617] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bc7129-1ff6-9ef8-718d-4f6fd25f7e4f, 'name': SearchDatastore_Task, 'duration_secs': 0.010856} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.280304] env[68638]: INFO nova.compute.manager [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Terminating instance [ 1034.283069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.283500] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 71ec29a8-5e2f-4ccd-9c22-d9721c77622e/71ec29a8-5e2f-4ccd-9c22-d9721c77622e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.285083] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c97559a-a030-41d7-8330-03e59b9f2cbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.296646] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1034.296646] env[68638]: value = "task-2834159" [ 1034.296646] env[68638]: _type = "Task" [ 1034.296646] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.310164] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.362761] env[68638]: DEBUG nova.compute.manager [req-4bd901d3-50e8-405f-9680-bb9b03402ba8 req-46fc45ef-038f-4df6-a837-3984cf56ab10 service nova] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Received event network-vif-deleted-637e93a8-5c95-4b4b-8681-1ef5669b70f9 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1034.477189] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834158, 'name': PowerOffVM_Task, 'duration_secs': 0.357828} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.477512] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.478431] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42bc8f5-2696-496b-8ca8-c5ad72c6768c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.511417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4f4fe0-5a2b-48fd-911c-7dab91ef692b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.521900] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834156, 'name': CreateVM_Task, 'duration_secs': 1.572446} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.524799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.528405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.528582] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.529102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1034.529409] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3548569e-8e85-46af-aa91-a9d3a3da25f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.536178] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1034.536178] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f1e3d0-c02f-c8ed-ea6e-6eb336249ca8" [ 1034.536178] env[68638]: _type = "Task" [ 1034.536178] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.545588] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f1e3d0-c02f-c8ed-ea6e-6eb336249ca8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.556481] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.556891] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-743b8edc-a46b-45f1-8b92-2e968238c138 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.565893] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1034.565893] env[68638]: value = "task-2834160" [ 1034.565893] env[68638]: _type = "Task" [ 1034.565893] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.576053] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1034.576315] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.576634] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.576811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.577014] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 
tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.577290] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b57277dc-5c10-4da0-8dc5-3c7e91af63ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.601819] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.602087] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.602958] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4eeb432-73af-4e6e-b14b-ecec5ae1c2ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.609019] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1034.609019] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52807275-e262-f54e-aaea-4cbd500696ea" [ 1034.609019] env[68638]: _type = "Task" [ 1034.609019] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.620483] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52807275-e262-f54e-aaea-4cbd500696ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.726763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7481a7d2-9820-47ff-b9a5-fc4af1611862 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.518s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.763366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-39ec21e1-8e7e-4eb3-9b96-e629febc2216 tempest-ListServersNegativeTestJSON-1942889640 tempest-ListServersNegativeTestJSON-1942889640-project-member] Lock "e9b8e5ad-4d47-48ad-995f-b28d0230df0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.469s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.792159] env[68638]: DEBUG nova.compute.manager [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1034.792398] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.793367] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5fd7e7-fed9-4564-8d1b-5f78cbc91b69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.807285] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.808906] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fea1b24-44fd-4138-a44e-0cef77e1dbc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.811039] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466233} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.811039] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 71ec29a8-5e2f-4ccd-9c22-d9721c77622e/71ec29a8-5e2f-4ccd-9c22-d9721c77622e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1034.811244] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1034.811687] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5f28acd-945a-4c1e-b09e-92d036767d52 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.816582] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1034.816582] env[68638]: value = "task-2834161" [ 1034.816582] env[68638]: _type = "Task" [ 1034.816582] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.817752] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1034.817752] env[68638]: value = "task-2834162" [ 1034.817752] env[68638]: _type = "Task" [ 1034.817752] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.832018] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834162, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.835778] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.048901] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f1e3d0-c02f-c8ed-ea6e-6eb336249ca8, 'name': SearchDatastore_Task, 'duration_secs': 0.055029} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.049240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.049473] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.050067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.119404] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52807275-e262-f54e-aaea-4cbd500696ea, 'name': SearchDatastore_Task, 'duration_secs': 0.053223} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.120231] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31488f7e-eff5-4764-8bc2-ba6d5a2f2875 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.125452] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1035.125452] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529e2e87-9818-b369-ad19-046d3c542824" [ 1035.125452] env[68638]: _type = "Task" [ 1035.125452] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.132808] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529e2e87-9818-b369-ad19-046d3c542824, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.210584] env[68638]: INFO nova.compute.manager [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Rebuilding instance [ 1035.228871] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.229202] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3f9ad85-f58a-4681-8f11-07c81407f847 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.244217] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1035.244217] env[68638]: value = "task-2834163" [ 1035.244217] env[68638]: _type = "Task" [ 1035.244217] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.252708] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.258024] env[68638]: DEBUG nova.compute.manager [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1035.258971] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f685fe-44a7-47a3-ab21-0d5c4fcbfe25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.331269] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834162, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073909} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.336525] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1035.336919] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834161, 'name': PowerOffVM_Task, 'duration_secs': 0.294052} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.338163] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d40611-6fe8-4708-9a33-d6fb856fb681 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.340168] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.341321] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.341321] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1122199a-984a-4f0e-b4af-44e35ed320cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.362974] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 71ec29a8-5e2f-4ccd-9c22-d9721c77622e/71ec29a8-5e2f-4ccd-9c22-d9721c77622e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1035.365731] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12f6b0ff-6871-4743-a18b-fd49b7df33b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.386916] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1035.386916] env[68638]: value = "task-2834165" [ 1035.386916] env[68638]: _type = "Task" [ 1035.386916] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.396652] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834165, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.407543] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.407760] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.407947] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Deleting the datastore file [datastore2] dcaef2e3-eb23-4a0b-b617-2880084e03ab {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.408243] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d784c9c8-a9d8-4d2f-b88f-fa0b3a6eb56d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.414876] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for the task: (returnval){ [ 1035.414876] env[68638]: value = "task-2834166" [ 1035.414876] env[68638]: _type = "Task" [ 1035.414876] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.428096] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.554534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afd225b-5b3c-4516-8636-a086a2ed2bd9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.563791] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da98c124-6b38-4d22-aeb1-2b73a7584f3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.597995] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098b007c-5b41-4eba-944c-1e4e3236c69a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.605921] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6bacda-62fb-4164-9d61-3429a81d0e04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.619892] env[68638]: DEBUG nova.compute.provider_tree [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.635931] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529e2e87-9818-b369-ad19-046d3c542824, 'name': SearchDatastore_Task, 'duration_secs': 0.009779} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.636215] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.636479] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. 
{{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1035.636765] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.636964] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.637357] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d316280d-4110-46ef-8338-0b30f92c71d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.639207] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dac230c1-2fdf-446e-bca4-fa5c97a7a64e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.646152] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1035.646152] env[68638]: value = "task-2834167" [ 1035.646152] env[68638]: _type = "Task" [ 1035.646152] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.650165] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.650342] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.651350] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5952894-e9e8-41b5-a7d2-5802c592f509 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.656120] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834167, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.658838] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1035.658838] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52161425-b54f-490d-c551-2da197016a9e" [ 1035.658838] env[68638]: _type = "Task" [ 1035.658838] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.665733] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52161425-b54f-490d-c551-2da197016a9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.753347] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834163, 'name': PowerOffVM_Task, 'duration_secs': 0.344076} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.753653] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.753891] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.755124] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21807b6f-3d01-42c5-b3b5-b9ecd9ab4485 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.762055] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.762055] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba950162-543f-4df4-ac43-c67a2ffc40c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.844540] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.844765] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace 
tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.844966] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.845287] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5788ad42-eb10-4ab8-8ad0-6faf9dc0e18b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.852049] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1035.852049] env[68638]: value = "task-2834169" [ 1035.852049] env[68638]: _type = "Task" [ 1035.852049] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.860709] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.899491] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834165, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.934378] env[68638]: DEBUG oslo_vmware.api [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Task: {'id': task-2834166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151786} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.935420] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.935698] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.936240] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.936240] env[68638]: INFO nova.compute.manager [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1035.936571] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.936928] env[68638]: DEBUG nova.compute.manager [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1035.936928] env[68638]: DEBUG nova.network.neutron [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.124849] env[68638]: DEBUG nova.scheduler.client.report [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.159088] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834167, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.172201] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52161425-b54f-490d-c551-2da197016a9e, 'name': SearchDatastore_Task, 'duration_secs': 0.009806} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.172201] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-351e48d9-551c-42f6-bf8c-ef7ec17a59be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.178524] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1036.178524] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52249375-9d8d-a7c0-7e03-aa4fb2a12a70" [ 1036.178524] env[68638]: _type = "Task" [ 1036.178524] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.188838] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52249375-9d8d-a7c0-7e03-aa4fb2a12a70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.276983] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.277341] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-558fdb39-9795-4448-ba27-34e9f68e35dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.286191] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1036.286191] env[68638]: value = "task-2834170" [ 1036.286191] env[68638]: _type = "Task" [ 1036.286191] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.297071] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834170, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.366658] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30623} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.366658] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.366658] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.366950] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.399896] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834165, 'name': ReconfigVM_Task, 'duration_secs': 0.727155} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.400216] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 71ec29a8-5e2f-4ccd-9c22-d9721c77622e/71ec29a8-5e2f-4ccd-9c22-d9721c77622e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1036.400932] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0be0247-694a-453a-b58c-c830caa2d0e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.409320] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1036.409320] env[68638]: value = "task-2834171" [ 1036.409320] env[68638]: _type = "Task" [ 1036.409320] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.421560] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834171, 'name': Rename_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.511424] env[68638]: DEBUG nova.compute.manager [req-a6a4a990-e53e-42f6-a417-b4c59dc89b24 req-7353b3e6-eb06-4b78-9248-cc9cebca8b79 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Received event network-vif-deleted-afa7c854-c29e-429c-8b58-1c18417595ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1036.511424] env[68638]: INFO nova.compute.manager [req-a6a4a990-e53e-42f6-a417-b4c59dc89b24 req-7353b3e6-eb06-4b78-9248-cc9cebca8b79 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Neutron deleted interface afa7c854-c29e-429c-8b58-1c18417595ca; detaching it from the instance and deleting it from the info cache [ 1036.511424] env[68638]: DEBUG nova.network.neutron [req-a6a4a990-e53e-42f6-a417-b4c59dc89b24 req-7353b3e6-eb06-4b78-9248-cc9cebca8b79 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.630393] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.399s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.630671] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1036.633514] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.420s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.633514] env[68638]: DEBUG nova.objects.instance [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'resources' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.658397] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.801564} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.658548] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. [ 1036.659368] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc46065-6131-40e4-b820-0733043729f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.684848] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.685819] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9498746d-0c9d-4451-b9ab-52af7363214e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.707398] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52249375-9d8d-a7c0-7e03-aa4fb2a12a70, 'name': SearchDatastore_Task, 'duration_secs': 0.056527} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.708543] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.708801] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.709336] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1036.709336] env[68638]: value = "task-2834172" [ 1036.709336] env[68638]: _type = "Task" [ 1036.709336] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.709521] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1170f57-4f50-43aa-88a3-99c7f8403553 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.719200] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834172, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.720287] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1036.720287] env[68638]: value = "task-2834173" [ 1036.720287] env[68638]: _type = "Task" [ 1036.720287] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.728013] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.795799] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834170, 'name': PowerOffVM_Task, 'duration_secs': 0.12717} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.796042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.796285] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.797084] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a4d6fe-00bd-44e9-91b1-0cae19d14e8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.803656] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.803895] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d298e3f-6d2d-4a93-8c54-57d16ba2b1ae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.829424] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.829637] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.829824] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Deleting the datastore file [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.830112] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58abe176-c078-4a81-b1e8-bd2befab943c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.837219] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1036.837219] env[68638]: value = "task-2834175" [ 1036.837219] env[68638]: _type = "Task" [ 1036.837219] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.844785] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.920993] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834171, 'name': Rename_Task, 'duration_secs': 0.159098} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.921337] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1036.921578] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc3ac70e-b947-46ca-b3a4-d48afe8b8582 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.932286] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1036.932286] env[68638]: value = "task-2834176" [ 1036.932286] env[68638]: _type = "Task" [ 1036.932286] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.945616] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834176, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.986935] env[68638]: DEBUG nova.network.neutron [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.014800] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dc0d0cf-0b5e-4dce-bd16-d112cc6298ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.027421] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d508de5-6367-4d92-90c8-5e26a2f8d8e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.068030] env[68638]: DEBUG nova.compute.manager [req-a6a4a990-e53e-42f6-a417-b4c59dc89b24 req-7353b3e6-eb06-4b78-9248-cc9cebca8b79 service nova] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Detach interface failed, port_id=afa7c854-c29e-429c-8b58-1c18417595ca, reason: Instance dcaef2e3-eb23-4a0b-b617-2880084e03ab could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1037.137865] env[68638]: DEBUG nova.compute.utils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1037.139645] env[68638]: DEBUG nova.objects.instance [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'numa_topology' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.140827] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.141017] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.197087] env[68638]: DEBUG nova.policy [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.228362] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.233617] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505123} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.234423] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.234712] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.235537] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-71d838e3-e436-4da4-949b-003380b452b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.242627] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1037.242627] env[68638]: value = "task-2834177" [ 1037.242627] env[68638]: _type = "Task" [ 1037.242627] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.251935] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834177, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.348400] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142312} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.348621] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.348812] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.348994] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.413406] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.413785] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.414008] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.414686] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.414920] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.415155] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace 
tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.415535] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.415745] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.416015] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.416267] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.416519] env[68638]: DEBUG nova.virt.hardware [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.417936] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc55d97-fa70-4844-9aa8-75aa8f2a217d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.426828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2aef99-9eec-42d9-afa4-bd3b8b379835 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.441413] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:83:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f99283c7-566e-4386-b66a-6295a6b67f68', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.449761] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.453815] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1037.454100] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e4bd8a0-e6bc-43fc-a0aa-ef07ac6c05f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.475561] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834176, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.476871] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.476871] env[68638]: value = "task-2834178" [ 1037.476871] env[68638]: _type = "Task" [ 1037.476871] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.484208] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834178, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.489684] env[68638]: INFO nova.compute.manager [-] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Took 1.55 seconds to deallocate network for instance. [ 1037.567986] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Successfully created port: c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.641535] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1037.646854] env[68638]: DEBUG nova.objects.base [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Object Instance<0249ffb9-82ed-44db-bb20-e619eaa176dd> lazy-loaded attributes: resources,numa_topology {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1037.724518] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.752220] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064421} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.754943] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.756380] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d9df62-c130-4816-b7f4-7e923b35e9d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.778892] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.781934] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-908be57a-1e7e-4514-99a2-f15dd8ecf546 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.803487] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1037.803487] env[68638]: value = "task-2834179" [ 1037.803487] env[68638]: _type = "Task" [ 1037.803487] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.814699] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834179, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.946014] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6ad2d8-1c68-481d-941b-d934d6e57a15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.955053] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834176, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.957866] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9933ab-da46-45cf-99b3-f12edee87e19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.991724] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66f6f74-ad4b-48ef-a719-407a9060d8c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.995653] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.001363] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834178, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.004812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7c9d3f-6536-4c1b-a042-32bf44b566a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.019068] env[68638]: DEBUG nova.compute.provider_tree [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.224949] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834172, 'name': ReconfigVM_Task, 'duration_secs': 1.217089} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.225268] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfigured VM instance instance-0000005a to attach disk [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.226198] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6a8041-345f-49bd-bda2-0bdf559d2b4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.251719] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2b9b290-627e-422b-89aa-b8b3ef233f05 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.267552] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1038.267552] env[68638]: value = "task-2834180" [ 1038.267552] env[68638]: _type = "Task" [ 1038.267552] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.278500] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.314920] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834179, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.340790] env[68638]: DEBUG nova.objects.instance [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lazy-loading 'flavor' on Instance uuid da886efd-bca9-45aa-abcc-13832c66a90c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.387277] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.387541] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.387718] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.387903] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.388115] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.388210] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.388443] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.388664] env[68638]: DEBUG 
nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.388865] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.389047] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.389291] env[68638]: DEBUG nova.virt.hardware [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.390181] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf60c803-1051-461d-a096-5f1aa28099ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.398636] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d7aec6-5188-4a56-ac47-921b951b6827 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.412481] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.419193] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.419484] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.419735] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0adde744-48a9-4f79-bcbb-f75d68668a0d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.437761] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.437761] env[68638]: value = "task-2834181" [ 1038.437761] env[68638]: _type = "Task" [ 1038.437761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.453873] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834181, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.457982] env[68638]: DEBUG oslo_vmware.api [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834176, 'name': PowerOnVM_Task, 'duration_secs': 1.090865} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.458435] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.458681] env[68638]: INFO nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Took 8.60 seconds to spawn the instance on the hypervisor. [ 1038.458878] env[68638]: DEBUG nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.459714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc089fb-95e5-4634-9af2-262a5517c78c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.501497] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834178, 'name': CreateVM_Task, 'duration_secs': 0.575624} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.501670] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.502399] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.502654] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.502962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1038.503227] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e67c963-28bd-4dae-a812-023d54778208 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.508601] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1038.508601] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f73953-4581-8850-0624-172f7aadfcac" [ 1038.508601] env[68638]: _type = "Task" [ 1038.508601] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.518151] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f73953-4581-8850-0624-172f7aadfcac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.522175] env[68638]: DEBUG nova.scheduler.client.report [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.657338] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1038.678658] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.678910] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.679082] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.679310] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.679471] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.679622] 
env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.679836] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.679998] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.680181] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.680342] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.680517] env[68638]: DEBUG nova.virt.hardware [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.681420] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af34fc35-cfa8-4a90-82de-762a430a19c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.689236] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb85fc63-0d4b-47e9-a3d1-ff9c5ce20997 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.777261] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834180, 'name': ReconfigVM_Task, 'duration_secs': 0.260283} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.777527] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.777779] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01d3128c-7abc-4d04-a4db-d6d3d38595a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.782944] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1038.782944] env[68638]: value = "task-2834182" [ 1038.782944] env[68638]: _type = "Task" [ 1038.782944] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.790247] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.814924] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834179, 'name': ReconfigVM_Task, 'duration_secs': 0.631439} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.814924] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.815280] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6964f9dd-d799-4fe0-9b9d-fc187904b616 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.822097] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1038.822097] env[68638]: value = "task-2834183" [ 1038.822097] env[68638]: _type = "Task" [ 1038.822097] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.833965] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834183, 'name': Rename_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.846075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.846287] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.948453] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834181, 'name': CreateVM_Task, 'duration_secs': 0.326254} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.948648] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1038.949097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.952464] env[68638]: DEBUG nova.compute.manager [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received event network-vif-plugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.952684] env[68638]: DEBUG oslo_concurrency.lockutils [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.952934] env[68638]: DEBUG oslo_concurrency.lockutils [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.953105] env[68638]: DEBUG oslo_concurrency.lockutils [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.953314] env[68638]: DEBUG nova.compute.manager [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] No waiting events 
found dispatching network-vif-plugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.953498] env[68638]: WARNING nova.compute.manager [req-63e41840-86de-4748-bca9-7ce14ca9f403 req-949d2398-33f7-4fae-9545-1cd5706fb90d service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received unexpected event network-vif-plugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd for instance with vm_state building and task_state spawning. [ 1038.978907] env[68638]: INFO nova.compute.manager [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Took 34.45 seconds to build instance. [ 1039.021102] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f73953-4581-8850-0624-172f7aadfcac, 'name': SearchDatastore_Task, 'duration_secs': 0.010749} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.021102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.021102] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.021102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.021102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.021102] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.021102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.021102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1039.021102] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca5dc0f9-e248-4207-ac4c-003e1da68291 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.023470] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7e493e-4ef0-434a-8aa3-37b1d924eebe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.028036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.395s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.031085] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1039.031085] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5241966c-0e57-7707-ff5c-fdb17292f1ff" [ 1039.031085] env[68638]: _type = "Task" [ 1039.031085] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.031697] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.554s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.031697] env[68638]: DEBUG nova.objects.instance [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'resources' on Instance uuid 4c954bb4-6291-47d5-a65c-0ad92a0fd193 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.043418] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.046022] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1039.049315] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdc3f73c-8e79-4e5d-a3a7-b6959554baa8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.051952] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5241966c-0e57-7707-ff5c-fdb17292f1ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.052724] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Successfully updated port: c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.059785] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1039.059785] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ad1cd8-f72a-d6e4-c1d2-6f4a00384dc5" [ 1039.059785] env[68638]: _type = "Task" [ 1039.059785] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.068664] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ad1cd8-f72a-d6e4-c1d2-6f4a00384dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.211829] env[68638]: DEBUG nova.network.neutron [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.231675] env[68638]: DEBUG nova.compute.manager [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-changed-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1039.231810] env[68638]: DEBUG nova.compute.manager [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing instance network info cache due to event network-changed-64e16852-058c-41a3-804c-d16bb756b439. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1039.231973] env[68638]: DEBUG oslo_concurrency.lockutils [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.293587] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834182, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.330973] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834183, 'name': Rename_Task, 'duration_secs': 0.143395} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.331288] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1039.331545] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02ee1bdb-bb7e-41cc-bf17-824162d7575c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.338745] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1039.338745] env[68638]: value = "task-2834184" [ 1039.338745] env[68638]: _type = "Task" [ 1039.338745] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.346168] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834184, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.481413] env[68638]: DEBUG oslo_concurrency.lockutils [None req-baf7e5cd-82dc-4f53-8156-4624687e9a33 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.960s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.541703] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5c52d386-d2b0-4851-ab6d-e877142a7326 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 33.939s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.544113] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 10.525s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.544457] env[68638]: INFO nova.compute.manager [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Unshelving [ 1039.553103] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5241966c-0e57-7707-ff5c-fdb17292f1ff, 'name': SearchDatastore_Task, 'duration_secs': 0.013512} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.553383] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.553699] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1039.553884] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.559415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.559595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.559724] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.569962] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ad1cd8-f72a-d6e4-c1d2-6f4a00384dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.009054} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.570772] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73dd54f8-f4d7-48c6-b6c0-41a9121678c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.578950] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1039.578950] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e536e6-3c9d-a223-6aff-42b0cb5ab4c4" [ 1039.578950] env[68638]: _type = "Task" [ 1039.578950] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.588472] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e536e6-3c9d-a223-6aff-42b0cb5ab4c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.798058] env[68638]: DEBUG oslo_vmware.api [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834182, 'name': PowerOnVM_Task, 'duration_secs': 0.694332} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.798354] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.801602] env[68638]: DEBUG nova.compute.manager [None req-fd15886d-2b64-4c33-a765-084bf906f21c tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.802916] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da65c5d0-86b1-4821-8b5b-f6a6dfd78a60 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.814234] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2b4312-02ee-4986-925e-3b8c07423d86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.822143] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd88b91-425e-4f44-b7dd-80368e5c0d3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.860167] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27a2a27-e2dd-49bf-91b1-af2fee08bc32 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.871247] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644d69b1-cda6-4ecf-b165-6bec471fcd9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.875247] env[68638]: DEBUG oslo_vmware.api [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834184, 'name': PowerOnVM_Task, 'duration_secs': 0.471539} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.875808] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.876438] env[68638]: INFO nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Took 7.53 seconds to spawn the instance on the hypervisor. [ 1039.876438] env[68638]: DEBUG nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.879280] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05407783-7b90-487c-b417-4f502465d61a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.889986] env[68638]: DEBUG nova.compute.provider_tree [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.998714] env[68638]: DEBUG nova.network.neutron [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.090474] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e536e6-3c9d-a223-6aff-42b0cb5ab4c4, 'name': SearchDatastore_Task, 'duration_secs': 0.013441} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.091276] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.093221] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.093728] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1040.093992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.094268] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.094463] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2dec05f-5dec-4b30-9e58-36e10af27ef6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.097058] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-3dc0f3b0-8f85-4490-a8ed-d87feb135e3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.104646] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1040.104646] env[68638]: value = "task-2834185" [ 1040.104646] env[68638]: _type = "Task" [ 1040.104646] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.106128] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.106313] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1040.109720] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b304753-606a-4d9c-8833-80b7688aef11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.120393] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.120693] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1040.120693] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5245ff77-02d0-1155-1f6d-220aaeed82db" [ 1040.120693] env[68638]: _type = "Task" [ 1040.120693] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.129180] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5245ff77-02d0-1155-1f6d-220aaeed82db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.242883] env[68638]: DEBUG nova.network.neutron [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updating instance_info_cache with network_info: [{"id": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "address": "fa:16:3e:59:19:81", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0117ec5-bf", "ovs_interfaceid": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.393268] env[68638]: DEBUG nova.scheduler.client.report [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.403923] env[68638]: INFO nova.compute.manager [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Took 35.75 seconds to build instance. 
[ 1040.501352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.501656] env[68638]: DEBUG nova.compute.manager [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Inject network info {{(pid=68638) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1040.502640] env[68638]: DEBUG nova.compute.manager [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] network_info to inject: |[{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1040.508188] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfiguring VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1040.511571] env[68638]: DEBUG oslo_concurrency.lockutils [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.511571] env[68638]: DEBUG nova.network.neutron [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing network info cache for port 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.511571] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-611119dd-ea2f-4683-a3b6-a86f6e8f2fbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.543195] env[68638]: DEBUG oslo_vmware.api [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 1040.543195] env[68638]: value = "task-2834186" [ 1040.543195] env[68638]: _type = "Task" [ 1040.543195] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.554926] env[68638]: DEBUG nova.compute.utils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.558779] env[68638]: DEBUG oslo_vmware.api [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.616743] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834185, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.632557] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5245ff77-02d0-1155-1f6d-220aaeed82db, 'name': SearchDatastore_Task, 'duration_secs': 0.017884} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.637122] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13db672b-3dfb-4624-9224-111720cb1024 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.644504] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1040.644504] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52468d3f-e877-4991-b86d-a646a5795748" [ 1040.644504] env[68638]: _type = "Task" [ 1040.644504] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.657047] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52468d3f-e877-4991-b86d-a646a5795748, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.747135] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.747135] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance network_info: |[{"id": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "address": "fa:16:3e:59:19:81", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0117ec5-bf", "ovs_interfaceid": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1040.747353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:19:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0117ec5-bfa5-418d-8a27-8904ffcfadbd', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.759360] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.759666] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.759957] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51cf4eb3-775c-4e24-80ac-0590f6939ea5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.779660] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.779660] env[68638]: value = "task-2834187" [ 1040.779660] env[68638]: _type = "Task" [ 1040.779660] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.787695] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834187, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.901540] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.903092] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.197s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.905245] env[68638]: INFO nova.compute.claims [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.910725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-bace5861-40e2-48a2-8049-04fe37031cf8 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.263s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.927036] env[68638]: INFO nova.scheduler.client.report [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted allocations for instance 4c954bb4-6291-47d5-a65c-0ad92a0fd193 [ 1040.997976] env[68638]: DEBUG nova.compute.manager [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.998947] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-341e0798-72aa-488f-80bc-bd4e962d541d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.056838] env[68638]: DEBUG oslo_vmware.api [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834186, 'name': ReconfigVM_Task, 'duration_secs': 0.227028} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.057134] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7d3bacd0-1b52-41b9-ba0f-c3e47f379afc tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfigured VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1041.061569] env[68638]: INFO nova.virt.block_device [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Booting with volume e62ecc96-280f-49b1-b4a1-915281c6d7c5 at /dev/sdb [ 1041.106716] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54d3fd55-960e-46c0-9dde-6237bd15074b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.111243] env[68638]: DEBUG nova.compute.manager [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received event network-changed-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1041.111444] env[68638]: DEBUG nova.compute.manager [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Refreshing instance network info cache due to event network-changed-c0117ec5-bfa5-418d-8a27-8904ffcfadbd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1041.111661] env[68638]: DEBUG oslo_concurrency.lockutils [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] Acquiring lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.111807] env[68638]: DEBUG oslo_concurrency.lockutils [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] Acquired lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.112170] env[68638]: DEBUG nova.network.neutron [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Refreshing network info cache for port c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.115125] env[68638]: DEBUG nova.objects.instance [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lazy-loading 'flavor' on Instance uuid da886efd-bca9-45aa-abcc-13832c66a90c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.126529] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834185, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603689} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.128441] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1041.128441] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1041.128441] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0aa5778e-acda-4230-b5cd-78638bd229b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.133217] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984863c8-2e8c-4cf1-9835-ff7fcfc7294b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.143953] env[68638]: DEBUG nova.network.neutron [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updated VIF 
entry in instance network info cache for port 64e16852-058c-41a3-804c-d16bb756b439. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.144328] env[68638]: DEBUG nova.network.neutron [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.155312] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1041.155312] env[68638]: value = "task-2834188" [ 1041.155312] env[68638]: _type = "Task" [ 1041.155312] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.169219] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52468d3f-e877-4991-b86d-a646a5795748, 'name': SearchDatastore_Task, 'duration_secs': 0.048406} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.186117] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.186457] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1041.187442] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.188675] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b63d4b4-a144-46ae-9bcf-fc497595b54f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.191091] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd33ec89-b12b-41a3-a923-17f8c1831a32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.203325] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c036777e-e7bd-4801-bdf6-5fa167934d0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.214810] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1041.214810] env[68638]: value = "task-2834189" [ 1041.214810] env[68638]: _type = "Task" [ 1041.214810] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.243756] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20066e8-6228-48c1-aec0-49fbe1c150f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.246667] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834189, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.252589] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eaac29-afe1-43d4-b8b1-50bc2cd42282 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.265507] env[68638]: DEBUG nova.virt.block_device [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating existing volume attachment record: 04151886-8b4d-4fae-846f-77ae87f82b8d {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1041.290037] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834187, 'name': CreateVM_Task, 'duration_secs': 0.32623} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.290227] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.290919] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.291115] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.291432] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.291694] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6375adf8-5b0d-4d73-8639-3be2878b3bb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.296415] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1041.296415] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c12ebc-9805-c9ee-3304-cb5a267e8ac9" [ 1041.296415] env[68638]: _type = "Task" [ 1041.296415] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.303797] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c12ebc-9805-c9ee-3304-cb5a267e8ac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.345042] env[68638]: INFO nova.compute.manager [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Rebuilding instance [ 1041.387493] env[68638]: DEBUG nova.compute.manager [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.388399] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5143591-ed86-4b1f-a23b-6fbde9f1640c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.434971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a7e1692-2c8e-4a0b-ab54-f0a2a8caf446 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "4c954bb4-6291-47d5-a65c-0ad92a0fd193" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.796s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.511577] env[68638]: INFO nova.compute.manager [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] instance snapshotting [ 1041.513778] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940d606a-7bef-46bc-959c-8f65ae9928ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.534697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76529f47-a660-41bb-a4a8-ff3f5c1f6ab2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.623987] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.647836] env[68638]: DEBUG oslo_concurrency.lockutils [req-2fbb4ccd-0524-49f6-b752-1414d4b2f79c req-140d3afe-4b96-4bba-a03e-37f85fb85274 service nova] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.648339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b 
tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.667376] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084252} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.667707] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1041.668625] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85bdb09-4b5c-4851-8423-f8abb08b5813 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.693072] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.693450] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7f12df-a869-4d11-bcc2-615af771d0e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.709852] env[68638]: INFO nova.compute.manager [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Unrescuing [ 1041.710157] env[68638]: DEBUG oslo_concurrency.lockutils [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.710312] env[68638]: DEBUG oslo_concurrency.lockutils [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.710484] env[68638]: DEBUG nova.network.neutron [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1041.717954] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1041.717954] env[68638]: value = "task-2834193" [ 1041.717954] env[68638]: _type = "Task" [ 1041.717954] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.736404] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834189, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.737379] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834193, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.807140] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c12ebc-9805-c9ee-3304-cb5a267e8ac9, 'name': SearchDatastore_Task, 'duration_secs': 0.061367} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.807539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.807792] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.808066] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.808223] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.808409] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 
tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.809850] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a71464e-200b-413b-855f-bdcd6c69be8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.822519] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.822678] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.823563] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5132521d-f458-44d7-84c3-88f3c60defca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.830341] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1041.830341] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5234a2b1-5caa-91ff-6af2-9cde5a93ca48" [ 1041.830341] env[68638]: _type = "Task" [ 1041.830341] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.839031] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5234a2b1-5caa-91ff-6af2-9cde5a93ca48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.968434] env[68638]: DEBUG nova.network.neutron [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updated VIF entry in instance network info cache for port c0117ec5-bfa5-418d-8a27-8904ffcfadbd. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.968758] env[68638]: DEBUG nova.network.neutron [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updating instance_info_cache with network_info: [{"id": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "address": "fa:16:3e:59:19:81", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0117ec5-bf", "ovs_interfaceid": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.046811] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1042.047194] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8b79d4aa-bcfd-4201-82ec-6ffcb9a2a515 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.059891] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1042.059891] env[68638]: value = "task-2834194" [ 1042.059891] env[68638]: _type = "Task" [ 1042.059891] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.068881] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834194, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.137095] env[68638]: DEBUG nova.network.neutron [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1042.210901] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c73cc0-e018-4f75-b664-6596bf4f5081 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.223392] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afd592e-f2b4-4d0b-982c-783c4e32ea21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.235748] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834189, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.272529] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834193, 'name': ReconfigVM_Task, 'duration_secs': 0.372521} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.276032] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea/4765bf70-1a72-4102-b5d3-ccedb7c383ea.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.277514] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adef86e-9c4c-4f8d-b11c-a9d901eb393e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.280439] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bdf682d-01e7-45ee-9d4b-43f5a095d8f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.289274] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1042.289274] env[68638]: value = "task-2834195" [ 1042.289274] env[68638]: _type = "Task" [ 1042.289274] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.290527] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58eb43b-2e71-4508-963e-9c92c23c3257 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.305040] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834195, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.312999] env[68638]: DEBUG nova.compute.provider_tree [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.343487] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5234a2b1-5caa-91ff-6af2-9cde5a93ca48, 'name': SearchDatastore_Task, 'duration_secs': 0.011946} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.346927] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cbe6158-83bf-457f-be18-301fe23b8915 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.352313] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1042.352313] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5249aa06-6913-f364-b62d-1717b3829f45" [ 1042.352313] env[68638]: _type = "Task" [ 1042.352313] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.360431] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5249aa06-6913-f364-b62d-1717b3829f45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.406293] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.406536] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4e31fde-5f8c-4696-8ed6-7fa49bf3085d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.414291] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1042.414291] env[68638]: value = "task-2834196" [ 1042.414291] env[68638]: _type = "Task" [ 1042.414291] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.424854] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.471982] env[68638]: DEBUG oslo_concurrency.lockutils [req-742263a2-6f6d-42dd-aa84-c6e628d17451 req-eb6e2edb-942e-4885-a1d2-413a14bc3b44 service nova] Releasing lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.512305] env[68638]: DEBUG nova.network.neutron [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [{"id": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "address": "fa:16:3e:a8:3e:ae", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53ebdba3-fc", "ovs_interfaceid": "53ebdba3-fcaa-435f-a048-dd22fa9cc3b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.571114] env[68638]: DEBUG oslo_vmware.api [None 
req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.728554] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834189, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.804779] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834195, 'name': Rename_Task, 'duration_secs': 0.144845} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.805301] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.805653] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afc557a2-0756-4d07-86da-6b6aa8f26875 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.814024] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1042.814024] env[68638]: value = "task-2834197" [ 1042.814024] env[68638]: _type = "Task" [ 1042.814024] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.816527] env[68638]: DEBUG nova.scheduler.client.report [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.823487] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834197, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.866882] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5249aa06-6913-f364-b62d-1717b3829f45, 'name': SearchDatastore_Task, 'duration_secs': 0.012773} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.867136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.867412] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 0be6f174-fad2-4ee3-be07-b6190073b40c/0be6f174-fad2-4ee3-be07-b6190073b40c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.867694] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b8b6ac4-c7f2-4fe4-b41e-6f3debe97927 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.874102] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1042.874102] env[68638]: value = "task-2834198" [ 1042.874102] env[68638]: _type = "Task" [ 1042.874102] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.882651] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834198, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.906556] env[68638]: DEBUG nova.network.neutron [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.925099] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834196, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.939442] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.939800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.015622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-df2e066d-7c71-4aec-ab9b-a339a7ff21fb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.016475] env[68638]: DEBUG nova.objects.instance [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'flavor' on Instance uuid df2e066d-7c71-4aec-ab9b-a339a7ff21fb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.072509] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.136225] env[68638]: DEBUG nova.compute.manager [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-changed-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1043.136508] env[68638]: DEBUG nova.compute.manager [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing instance network info cache due to event network-changed-64e16852-058c-41a3-804c-d16bb756b439. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1043.136752] env[68638]: DEBUG oslo_concurrency.lockutils [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] Acquiring lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.233071] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834189, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.656262} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.233346] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.233563] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.233849] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d08dd6b7-eeab-4852-8341-a31fbf2bdf65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.241039] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1043.241039] env[68638]: value = "task-2834199" [ 1043.241039] env[68638]: _type = "Task" [ 1043.241039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.253021] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.322199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.322741] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1043.325485] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834197, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.325834] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.596s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.326080] env[68638]: DEBUG nova.objects.instance [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid e7559933-fecc-4eb6-ba71-a295fba684e4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.386461] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834198, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.409862] env[68638]: DEBUG oslo_concurrency.lockutils [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.410237] env[68638]: DEBUG nova.compute.manager [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Inject network info {{(pid=68638) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 1043.410608] env[68638]: DEBUG nova.compute.manager [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] network_info to inject: |[{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) 
_inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 1043.416226] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfiguring VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1043.416577] env[68638]: DEBUG oslo_concurrency.lockutils [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] Acquired lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.416789] env[68638]: DEBUG nova.network.neutron [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Refreshing network info cache for port 64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.418413] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ae01da7-cf23-4b7d-ac63-ba3af6a3a545 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.439011] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834196, 'name': PowerOffVM_Task, 'duration_secs': 0.580842} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.440447] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.440688] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.441912] env[68638]: DEBUG oslo_vmware.api [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 1043.441912] env[68638]: value = "task-2834200" [ 1043.441912] env[68638]: _type = "Task" [ 1043.441912] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.442682] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3ca147-de71-4b16-b61d-80d74983f950 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.446079] env[68638]: INFO nova.compute.manager [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Detaching volume e57502e7-aa0f-4e7b-90cd-6099cf70f48c [ 1043.459268] env[68638]: DEBUG oslo_vmware.api [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834200, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.459557] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.460417] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2eb8d41f-dbd0-4cbd-8ca3-c7c92cdd92b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.483271] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.483520] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.483749] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Deleting the datastore file [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.484032] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a229c47f-7059-4999-9a83-40b026c18a06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.490499] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1043.490499] env[68638]: value = "task-2834202" [ 1043.490499] env[68638]: _type = "Task" [ 1043.490499] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.495361] env[68638]: INFO nova.virt.block_device [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Attempting to driver detach volume e57502e7-aa0f-4e7b-90cd-6099cf70f48c from mountpoint /dev/sdb [ 1043.495637] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1043.495887] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569878', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'name': 'volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b176c5d-e77c-410b-b282-b7bba65359a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'serial': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1043.496910] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a88ef6-43c5-43b0-a78c-e783a9fd033f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.504842] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834202, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.525027] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648b4a2a-e983-4e26-b00f-bbeb916b4cca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.528901] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e188ac17-4ac5-4239-ad0d-b2b014d2ab5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.551961] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e1ea5c-18af-4181-81a7-5b2a55e7e88e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.555016] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.555315] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c5e85d8-a369-42f3-af12-ef50c661064f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.584516] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b539323-9969-4c5f-962c-2347a26dfebb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.586586] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1043.586586] env[68638]: value = "task-2834204" [ 1043.586586] env[68638]: _type = "Task" [ 1043.586586] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.604828] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834194, 'name': CreateSnapshot_Task, 'duration_secs': 1.289328} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.605188] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] The volume has not been displaced from its original location: [datastore2] volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c/volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1043.611528] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfiguring VM instance instance-00000024 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1043.612448] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1043.612752] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f5ff914-f77d-46cc-bf9c-10caac5cfc40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.629666] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63760e10-6e3b-44f1-a314-02e380a63f7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.632731] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834204, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.642769] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1043.642769] env[68638]: value = "task-2834205" [ 1043.642769] env[68638]: _type = "Task" [ 1043.642769] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.651037] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834205, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.753048] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1779} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.753048] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1043.754052] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d40b08b-f26d-4dc8-b206-bb7522209647 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.774067] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1043.774379] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c23b89cb-cacf-4e5b-99a9-8de7e8a0263e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.793846] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1043.793846] env[68638]: value = "task-2834206" [ 1043.793846] env[68638]: _type = "Task" [ 1043.793846] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.801893] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834206, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.821831] env[68638]: DEBUG oslo_vmware.api [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834197, 'name': PowerOnVM_Task, 'duration_secs': 0.63835} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.822211] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.822477] env[68638]: DEBUG nova.compute.manager [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.823394] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56abce1b-b79b-424e-8877-b980e9e184e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.830444] env[68638]: DEBUG nova.compute.utils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1043.836827] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1043.837025] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1043.862117] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "da886efd-bca9-45aa-abcc-13832c66a90c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.862372] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.862725] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.863215] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.863215] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.865365] env[68638]: INFO nova.compute.manager [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Terminating instance [ 1043.888339] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539312} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.889757] env[68638]: DEBUG nova.policy [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '075b1dab9233409390d346c7bbfa3d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efa342b9d9a34e9e8e708c8f356f905e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1043.893620] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 0be6f174-fad2-4ee3-be07-b6190073b40c/0be6f174-fad2-4ee3-be07-b6190073b40c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.893871] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.894545] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e88baebb-5acd-418c-a360-1405cbd2ade1 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.901975] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1043.901975] env[68638]: value = "task-2834207" [ 1043.901975] env[68638]: _type = "Task" [ 1043.901975] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.912635] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.962190] env[68638]: DEBUG oslo_vmware.api [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834200, 'name': ReconfigVM_Task, 'duration_secs': 0.156871} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.962530] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-cf2cc6c9-aed6-4129-bc42-2c690dbac57b tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Reconfigured VM instance to set the machine id {{(pid=68638) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1044.003907] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109787} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.004216] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.004730] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.004730] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.096266] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834204, 'name': PowerOffVM_Task, 'duration_secs': 0.339317} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.098641] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.103883] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1044.109019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52316b04-cfa0-4e05-9e95-1a4fe38b1f34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.121407] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc50355-8750-4f93-87be-082251adb694 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.134541] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f5dbf5-84ab-4abb-b84e-453cf31fd6e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.136531] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1044.136531] env[68638]: value = "task-2834208" [ 1044.136531] env[68638]: _type = "Task" [ 1044.136531] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.171386] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1044.174703] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-67f54c75-938f-4fb9-bc64-86ed7fe5826a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.180782] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6993bf9e-17d2-4838-b3a2-578f71685804 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.186527] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834208, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.193022] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834205, 'name': ReconfigVM_Task, 'duration_secs': 0.391608} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.196034] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Reconfigured VM instance instance-00000024 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1044.205315] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1044.205315] env[68638]: value = "task-2834209" [ 1044.205315] env[68638]: _type = "Task" [ 1044.205315] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.205315] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb09ffc7-599b-4e92-aa81-8366214b7a66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.212986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af55e124-22e0-4ba2-9ff0-679b12aa8ce6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.225287] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834209, 'name': CloneVM_Task} progress is 12%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.233965] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1044.233965] env[68638]: value = "task-2834210" [ 1044.233965] env[68638]: _type = "Task" [ 1044.233965] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.234866] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1044.247111] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834210, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.255539] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Successfully created port: f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1044.278561] env[68638]: DEBUG nova.network.neutron [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updated VIF entry in instance network info cache for port 64e16852-058c-41a3-804c-d16bb756b439. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1044.279401] env[68638]: DEBUG nova.network.neutron [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [{"id": "64e16852-058c-41a3-804c-d16bb756b439", "address": "fa:16:3e:4b:c3:15", "network": {"id": "80119fab-23a5-4556-af67-8892a45697a4", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1071287649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e89fff19d6c461e8818d182dfd7d45e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64e16852-05", "ovs_interfaceid": "64e16852-058c-41a3-804c-d16bb756b439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.308238] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834206, 'name': ReconfigVM_Task, 'duration_secs': 0.323026} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.308238] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0/2cdcff10-089b-47fd-ba41-2e3a75cd33b0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.308238] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5c2c766-8e77-44bf-b4de-da83fa03d029 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.314718] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1044.314718] env[68638]: value = "task-2834211" [ 1044.314718] env[68638]: _type = "Task" [ 1044.314718] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.323190] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834211, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.337340] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1044.349630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.371672] env[68638]: DEBUG nova.compute.manager [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1044.371672] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.372292] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79d83d0-8b13-4d66-a9c2-adc4e66a493a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.382728] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.383038] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2211a02d-db3d-4f47-96fa-34fb5c63ccd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.389595] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 1044.389595] env[68638]: value = "task-2834212" [ 1044.389595] env[68638]: _type = "Task" [ 1044.389595] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.401043] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.412177] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077202} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.412464] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.413273] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8d5519-4d0a-4f5e-909c-609f1e604b24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.439568] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 0be6f174-fad2-4ee3-be07-b6190073b40c/0be6f174-fad2-4ee3-be07-b6190073b40c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.439951] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30c1d715-b34b-4962-be7e-2db1222ad524 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.462998] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1044.462998] env[68638]: value = "task-2834213" [ 1044.462998] env[68638]: _type = "Task" [ 1044.462998] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.472942] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834213, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.647456] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834208, 'name': ReconfigVM_Task, 'duration_secs': 0.503536} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.647745] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1044.647947] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.648195] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cb55bf6-3334-4994-bb17-45ef013d8efb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.657620] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1044.657620] env[68638]: value = "task-2834214" [ 1044.657620] env[68638]: _type = "Task" [ 1044.657620] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.666957] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.723182] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834209, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.753230] env[68638]: DEBUG oslo_vmware.api [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834210, 'name': ReconfigVM_Task, 'duration_secs': 0.385293} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.753230] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-569878', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'name': 'volume-e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1b176c5d-e77c-410b-b282-b7bba65359a9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c', 'serial': 'e57502e7-aa0f-4e7b-90cd-6099cf70f48c'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1044.763403] env[68638]: ERROR nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [req-72f1b91e-37ab-43d9-bb19-a1cc6a8ffab9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-72f1b91e-37ab-43d9-bb19-a1cc6a8ffab9"}]} [ 1044.782632] env[68638]: DEBUG oslo_concurrency.lockutils [req-9bd283ed-1c3b-4469-a824-1d9dff63d397 req-b0bfb620-b444-433d-b64e-c7b279212f8d service nova] Releasing lock "refresh_cache-da886efd-bca9-45aa-abcc-13832c66a90c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.784054] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1044.787282] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.787398] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.787568] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.787746] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.787986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.790194] env[68638]: INFO nova.compute.manager [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Terminating instance [ 1044.808022] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1044.808022] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1044.823316] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, 
aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1044.834362] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834211, 'name': Rename_Task, 'duration_secs': 0.158909} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.834362] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1044.834362] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d73e8f7-d382-4a1e-ad21-6bd85dfa253d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.843034] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1044.843034] env[68638]: value = "task-2834215" [ 1044.843034] env[68638]: _type = "Task" [ 1044.843034] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.848191] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1044.861050] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834215, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.903550] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834212, 'name': PowerOffVM_Task, 'duration_secs': 0.505925} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.903918] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.904146] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.904695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8a1a005-fa3b-4777-bc40-16f64b5cbfef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.976279] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.982279] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.982329] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.982543] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Deleting the datastore file [datastore2] da886efd-bca9-45aa-abcc-13832c66a90c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.985460] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8908abb3-a325-474d-b08e-a32e3422c186 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.993551] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for the task: (returnval){ [ 1044.993551] env[68638]: value = "task-2834217" [ 1044.993551] env[68638]: _type = "Task" [ 1044.993551] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.003676] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.045926] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1045.046259] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.046500] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1045.046737] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.046966] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1045.047192] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1045.047458] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1045.047687] 
env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1045.047956] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1045.048155] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1045.048425] env[68638]: DEBUG nova.virt.hardware [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1045.049476] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59892e0-d77e-4e22-8129-c37ee06d8360 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.065877] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4cb737-1f0b-4f94-bc63-57eead5816e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.085805] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance VIF info [] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.093252] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.096752] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.097360] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43a146b2-5e15-4515-8c68-62ee7081bffe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.120428] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.120428] env[68638]: value = "task-2834218" [ 1045.120428] env[68638]: _type = "Task" [ 1045.120428] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.131166] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834218, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.162490] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba86500-2748-4d21-a32e-5e2f58c144fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.170698] env[68638]: DEBUG oslo_vmware.api [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834214, 'name': PowerOnVM_Task, 'duration_secs': 0.490397} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.172799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.173073] env[68638]: DEBUG nova.compute.manager [None req-55909d0d-4a47-4bc5-a3d8-141885eddcdd tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.173991] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8c35c4-4461-4f2e-aed9-16c4ca3bb5f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.177601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690f35b8-ddec-4d75-88bf-e9ca4c2db4ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.218247] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260ec2ca-19a3-4f32-8b1d-5226841f92f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.226985] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834209, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.230211] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dd7e09-e89c-475d-997a-220a6c323845 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.245852] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1045.296167] env[68638]: DEBUG nova.compute.manager [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1045.296444] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.297372] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ba5c6a-0cb6-47d6-b9d7-00c247d7b719 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.307038] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.308524] env[68638]: DEBUG nova.objects.instance [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid 1b176c5d-e77c-410b-b282-b7bba65359a9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.309830] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d109673-bad9-4675-b457-d1f832de329f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.317206] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1045.317206] env[68638]: value = "task-2834219" [ 1045.317206] env[68638]: _type = "Task" [ 1045.317206] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.327251] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.351740] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834215, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.357228] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1045.382652] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1045.382950] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.383157] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1045.383360] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.383514] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1045.383702] env[68638]: DEBUG 
nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1045.383980] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1045.384197] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1045.384382] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1045.384548] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1045.384752] env[68638]: DEBUG nova.virt.hardware [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1045.385339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.385556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.388423] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196fd09e-22be-4984-9573-4905f4dff6b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.399239] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3de2719-7409-4812-b2e6-a3bef979f50b {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.478784] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834213, 'name': ReconfigVM_Task, 'duration_secs': 0.632777} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.479479] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 0be6f174-fad2-4ee3-be07-b6190073b40c/0be6f174-fad2-4ee3-be07-b6190073b40c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.481174] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6880eaea-05c0-458f-a9b6-2b61358d122b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.487941] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1045.487941] env[68638]: value = "task-2834220" [ 1045.487941] env[68638]: _type = "Task" [ 1045.487941] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.496958] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834220, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.504672] env[68638]: DEBUG oslo_vmware.api [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Task: {'id': task-2834217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196583} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.504928] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.505138] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.505321] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.505499] env[68638]: INFO nova.compute.manager [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1045.506625] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.506625] env[68638]: DEBUG nova.compute.manager [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.506625] env[68638]: DEBUG nova.network.neutron [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1045.630736] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834218, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.727392] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834209, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.766962] env[68638]: ERROR nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [req-38be9cb2-4ff5-4453-b623-c257c41153d5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-38be9cb2-4ff5-4453-b623-c257c41153d5"}]} [ 1045.833214] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.851669] env[68638]: DEBUG oslo_vmware.api [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834215, 'name': PowerOnVM_Task, 'duration_secs': 0.561272} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.851983] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1045.852209] env[68638]: DEBUG nova.compute.manager [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.853017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316fcd0e-ec40-4896-80a2-f742ff3eb711 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.857472] env[68638]: DEBUG nova.compute.manager [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Received event network-vif-plugged-f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1045.857691] env[68638]: DEBUG oslo_concurrency.lockutils [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] Acquiring lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.857898] 
env[68638]: DEBUG oslo_concurrency.lockutils [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.858072] env[68638]: DEBUG oslo_concurrency.lockutils [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.858241] env[68638]: DEBUG nova.compute.manager [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] No waiting events found dispatching network-vif-plugged-f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1045.858401] env[68638]: WARNING nova.compute.manager [req-449861ee-39e5-4d10-b8a8-4c7bedb9583a req-96003c30-0313-446a-b332-e5cf76e09416 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Received unexpected event network-vif-plugged-f197b79d-c838-421c-930b-a1d1d5f1b89c for instance with vm_state building and task_state spawning. [ 1045.876770] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1045.892630] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.897926] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1045.898217] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1045.911854] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1045.933792] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1045.974778] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Successfully updated port: f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.003096] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834220, 'name': Rename_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.095193] env[68638]: DEBUG nova.compute.manager [req-f0ad2b7f-fd7d-40eb-9d59-43930a7d9d70 req-22b43786-6a5c-4f01-8dda-b3e6607a3ba8 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Received event network-vif-deleted-64e16852-058c-41a3-804c-d16bb756b439 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1046.095409] env[68638]: INFO nova.compute.manager [req-f0ad2b7f-fd7d-40eb-9d59-43930a7d9d70 req-22b43786-6a5c-4f01-8dda-b3e6607a3ba8 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Neutron deleted interface 64e16852-058c-41a3-804c-d16bb756b439; detaching it from the instance and deleting it from the info cache [ 1046.095582] env[68638]: DEBUG nova.network.neutron [req-f0ad2b7f-fd7d-40eb-9d59-43930a7d9d70 req-22b43786-6a5c-4f01-8dda-b3e6607a3ba8 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.131115] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834218, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.226300] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834209, 'name': CloneVM_Task, 'duration_secs': 1.632782} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.229087] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Created linked-clone VM from snapshot [ 1046.230102] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3dd7bc7-ae2a-4032-a9a4-3aef853081e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.239690] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Uploading image a615c3b0-d235-4e52-9794-8b84340322db {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1046.245232] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5cd4f88-39e0-496d-a369-f9def158cfaf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.253732] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3453f3-805a-4e65-b366-f71bd40da5e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.258375] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 
1046.259029] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-57fe51d5-fb76-4bb7-83ba-498ccd8bf039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.289138] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c329b109-3a81-4945-8461-883a7ba44e4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.291941] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1046.291941] env[68638]: value = "task-2834221" [ 1046.291941] env[68638]: _type = "Task" [ 1046.291941] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.299220] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5180bd-b8a0-4af5-a160-0ba5cd141ba9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.306079] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834221, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.315908] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1046.323391] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3949a0f9-22df-4db1-a720-168dd81f56e6 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.383s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.337149] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834219, 'name': PowerOffVM_Task, 'duration_secs': 0.914635} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.337419] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1046.337587] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1046.337869] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9994dfd4-4b6b-436b-8387-5c927f6edeb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.375273] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.399422] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1046.399629] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1046.399813] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore2] 4765bf70-1a72-4102-b5d3-ccedb7c383ea {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1046.402006] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cce786b-980b-4a63-9030-e2719129e561 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.408491] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1046.408491] env[68638]: value = "task-2834223" [ 1046.408491] env[68638]: _type = "Task" [ 1046.408491] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.416790] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.417830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.477782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.477998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.478221] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.500281] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834220, 'name': Rename_Task, 'duration_secs': 0.712323} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.500568] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.500818] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27f0b622-d1d5-4cf5-b7bb-c1fa246e8446 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.506874] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1046.506874] env[68638]: value = "task-2834224" [ 1046.506874] env[68638]: _type = "Task" [ 1046.506874] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.520546] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834224, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.573314] env[68638]: DEBUG nova.network.neutron [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.598693] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fabeb8e-b7f2-499e-b507-40905f49d0c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.608513] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2996e769-205b-4097-9dcb-cf023b13e3cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.630805] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834218, 'name': CreateVM_Task, 'duration_secs': 1.184539} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.630805] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.631152] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.631338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.631971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1046.645309] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c35e2ce-e252-4db2-894c-a0073ebfaebf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.647575] env[68638]: DEBUG nova.compute.manager [req-f0ad2b7f-fd7d-40eb-9d59-43930a7d9d70 req-22b43786-6a5c-4f01-8dda-b3e6607a3ba8 service nova] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Detach interface failed, port_id=64e16852-058c-41a3-804c-d16bb756b439, reason: Instance da886efd-bca9-45aa-abcc-13832c66a90c could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1046.651853] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1046.651853] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ea6841-6edf-3842-f137-76e81b35f33c" [ 1046.651853] env[68638]: _type = "Task" [ 1046.651853] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.661137] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ea6841-6edf-3842-f137-76e81b35f33c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.802574] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834221, 'name': Destroy_Task, 'duration_secs': 0.484643} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.802923] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Destroyed the VM [ 1046.803204] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1046.803463] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1e0443e4-8a72-47d7-93c9-3bd57a21de0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.810567] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1046.810567] env[68638]: value = "task-2834225" [ 1046.810567] env[68638]: _type = "Task" [ 1046.810567] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.822401] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834225, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.852026] env[68638]: DEBUG nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 132 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1046.852196] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 132 to 133 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1046.852423] env[68638]: DEBUG nova.compute.provider_tree [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1046.894636] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.923574] env[68638]: DEBUG oslo_vmware.api [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188645} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.924015] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.924338] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.924743] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.924924] env[68638]: INFO nova.compute.manager [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1046.925342] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1046.925754] env[68638]: DEBUG nova.compute.manager [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1046.925872] env[68638]: DEBUG nova.network.neutron [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1047.011679] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1047.019581] env[68638]: DEBUG oslo_vmware.api [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834224, 'name': PowerOnVM_Task, 'duration_secs': 0.495168} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.019865] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.020084] env[68638]: INFO nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Took 8.36 seconds to spawn the instance on the hypervisor. [ 1047.020267] env[68638]: DEBUG nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.021086] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30c332f-b9bb-4e22-978b-10bf126ec4b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.076201] env[68638]: INFO nova.compute.manager [-] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Took 1.57 seconds to deallocate network for instance. [ 1047.164502] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ea6841-6edf-3842-f137-76e81b35f33c, 'name': SearchDatastore_Task, 'duration_secs': 0.012287} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.164502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.164502] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1047.164502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.164502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.164502] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1047.164502] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-835449bb-581e-4d8c-86e8-85c4be666db3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.176443] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1047.176443] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1047.176443] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-538f230b-399b-408e-b1b5-563239403762 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.179705] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1047.179705] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527eec80-8b19-d555-90a5-3458b3fc51f8" [ 1047.179705] env[68638]: _type = "Task" [ 1047.179705] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.183802] env[68638]: DEBUG nova.network.neutron [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updating instance_info_cache with network_info: [{"id": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "address": "fa:16:3e:6e:bd:cb", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf197b79d-c8", "ovs_interfaceid": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.190096] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527eec80-8b19-d555-90a5-3458b3fc51f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.254018] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.254335] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.254593] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.254795] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.254968] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.260611] env[68638]: INFO nova.compute.manager [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Terminating instance [ 1047.321631] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834225, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.357374] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.031s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.360015] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.364s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.360483] env[68638]: DEBUG nova.objects.instance [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lazy-loading 'resources' on Instance uuid dcaef2e3-eb23-4a0b-b617-2880084e03ab {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.378511] env[68638]: INFO nova.scheduler.client.report [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance e7559933-fecc-4eb6-ba71-a295fba684e4 [ 1047.379611] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.379837] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.380642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.381069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.381629] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.386190] env[68638]: INFO nova.compute.manager [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Terminating instance [ 1047.543708] env[68638]: INFO nova.compute.manager [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Took 26.04 seconds to build instance. [ 1047.582564] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.686524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.686918] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Instance network_info: |[{"id": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "address": "fa:16:3e:6e:bd:cb", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf197b79d-c8", "ovs_interfaceid": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.691075] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 
cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:bd:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f197b79d-c838-421c-930b-a1d1d5f1b89c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.699405] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.699665] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527eec80-8b19-d555-90a5-3458b3fc51f8, 'name': SearchDatastore_Task, 'duration_secs': 0.011818} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.700273] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.701335] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b0266a0-30e9-4b30-9ce7-04a26389e181 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.716768] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9f38b7e-644f-4e66-ae2c-19be74a2461d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.723557] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1047.723557] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ce87bd-acc2-df29-7dde-fd524935e659" [ 1047.723557] env[68638]: _type = "Task" [ 1047.723557] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.728260] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.728260] env[68638]: value = "task-2834226" [ 1047.728260] env[68638]: _type = "Task" [ 1047.728260] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.734373] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ce87bd-acc2-df29-7dde-fd524935e659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.739721] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834226, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.765140] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "refresh_cache-2cdcff10-089b-47fd-ba41-2e3a75cd33b0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.765544] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquired lock "refresh_cache-2cdcff10-089b-47fd-ba41-2e3a75cd33b0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.765544] env[68638]: DEBUG nova.network.neutron [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.772622] env[68638]: DEBUG nova.network.neutron [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.821852] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834225, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.889599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acbb1894-5400-4505-a5da-dd41b24ebb08 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "e7559933-fecc-4eb6-ba71-a295fba684e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.669s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.890995] env[68638]: DEBUG nova.compute.manager [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1047.891326] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1047.892830] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ffeb2b-8146-4cf1-a15c-c1b3756525ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.909418] env[68638]: DEBUG nova.compute.manager [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Received event network-changed-f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1047.909616] env[68638]: DEBUG nova.compute.manager [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Refreshing instance network info cache due to event network-changed-f197b79d-c838-421c-930b-a1d1d5f1b89c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1047.909834] env[68638]: DEBUG oslo_concurrency.lockutils [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] Acquiring lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.909976] env[68638]: DEBUG oslo_concurrency.lockutils [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] Acquired lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.910374] env[68638]: DEBUG nova.network.neutron [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Refreshing network info cache for port f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1047.917342] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.918340] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69f159fd-33a4-401b-b755-ea27d7ccbf84 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.934079] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1047.934079] env[68638]: value = "task-2834227" [ 1047.934079] env[68638]: _type = "Task" [ 1047.934079] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.940995] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.046037] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3300cac1-ddb9-4175-899b-609b1a6354c6 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.550s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.166683] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.167053] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.167335] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.167538] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.167712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.170225] env[68638]: INFO nova.compute.manager [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Terminating instance [ 1048.174805] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f7eddf-0bd8-4d21-8f15-e5d0dbca6d4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.185296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986db3a4-3594-48dc-8850-4e800d2c3c72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.220296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c0f3a99-e57b-415d-b437-c46c9dacee0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.236027] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e6df69-900f-443c-a8fb-552e690e7a0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.240844] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ce87bd-acc2-df29-7dde-fd524935e659, 'name': SearchDatastore_Task, 'duration_secs': 0.017291} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.241691] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.241954] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1048.242229] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b38f1ed0-050f-46d3-8c08-ac5aaf820e2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.255741] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834226, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.255741] env[68638]: DEBUG nova.compute.provider_tree [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1048.264026] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1048.264026] env[68638]: value = "task-2834228" [ 1048.264026] env[68638]: _type = "Task" [ 1048.264026] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.272525] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.275289] env[68638]: INFO nova.compute.manager [-] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Took 1.35 seconds to deallocate network for instance. [ 1048.287541] env[68638]: DEBUG nova.network.neutron [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.322516] env[68638]: DEBUG oslo_vmware.api [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834225, 'name': RemoveSnapshot_Task, 'duration_secs': 1.144367} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.322754] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1048.339423] env[68638]: DEBUG nova.network.neutron [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.401413] env[68638]: DEBUG nova.compute.manager [req-1e1b3e60-d672-4d7b-8e58-bd3e7b94e1ed req-bba38a95-8e49-4365-8b34-654ce9214a86 service nova] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Received event network-vif-deleted-f99283c7-566e-4386-b66a-6295a6b67f68 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1048.443813] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834227, 'name': PowerOffVM_Task, 'duration_secs': 0.172815} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.444170] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1048.444416] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.444661] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e1726b1-b856-4a45-afc9-27f3e8000245 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.679329] env[68638]: DEBUG nova.compute.manager [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.679567] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.680452] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9ab07c-1a89-4cdf-a366-403b99d3262d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.688961] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.689232] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-591ff0c1-2efd-46d3-994b-917a207a5475 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.695871] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1048.695871] env[68638]: value = "task-2834230" [ 1048.695871] env[68638]: _type = "Task" [ 1048.695871] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.700997] env[68638]: DEBUG nova.network.neutron [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updated VIF entry in instance network info cache for port f197b79d-c838-421c-930b-a1d1d5f1b89c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1048.701382] env[68638]: DEBUG nova.network.neutron [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updating instance_info_cache with network_info: [{"id": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "address": "fa:16:3e:6e:bd:cb", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf197b79d-c8", "ovs_interfaceid": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.705954] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.739157] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834226, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.771347] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.777318] env[68638]: ERROR nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] [req-e6ad6802-9ef7-4e4c-8a4e-c21dd1411a6b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e6ad6802-9ef7-4e4c-8a4e-c21dd1411a6b"}]} [ 1048.781568] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.793118] env[68638]: DEBUG nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1048.806177] env[68638]: DEBUG nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1048.806507] env[68638]: DEBUG nova.compute.provider_tree [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1048.815686] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.815686] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.815686] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 
tempest-AttachVolumeNegativeTest-754123378-project-member] Deleting the datastore file [datastore1] 1b176c5d-e77c-410b-b282-b7bba65359a9 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.817143] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e32f980-66c5-48dd-8744-7adb12dc5e94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.821986] env[68638]: DEBUG nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1048.826955] env[68638]: WARNING nova.compute.manager [None req-aa096c8f-9381-40ae-9bf7-0d9173260f9a tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Image not found during snapshot: nova.exception.ImageNotFound: Image a615c3b0-d235-4e52-9794-8b84340322db could not be found. [ 1048.831399] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1048.831399] env[68638]: value = "task-2834231" [ 1048.831399] env[68638]: _type = "Task" [ 1048.831399] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.842065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Releasing lock "refresh_cache-2cdcff10-089b-47fd-ba41-2e3a75cd33b0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.842466] env[68638]: DEBUG nova.compute.manager [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.842699] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.843038] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.843950] env[68638]: DEBUG nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1048.846751] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce94ab1f-baa9-485b-b189-920edb724bce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.854275] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.854571] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5fcf4690-2108-4f5d-a1b0-3318147c4f83 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.861027] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1048.861027] env[68638]: value = "task-2834232" [ 1048.861027] env[68638]: _type = "Task" [ 1048.861027] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.871705] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.881394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.881747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.881918] env[68638]: INFO nova.compute.manager [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Shelving [ 1049.114854] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545ddc8f-b63d-4a1c-92ac-dfbfaf5ea51e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.123794] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e815b515-260b-4d7d-be07-8a8400424c9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.165136] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fda8177-59f1-45eb-83d3-9d61548e7b30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.170642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.171036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.177172] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcd6da4-68eb-4b7f-a4e5-16bcee76ca60 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.192853] env[68638]: DEBUG nova.compute.provider_tree [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating 
inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1049.208782] env[68638]: DEBUG oslo_concurrency.lockutils [req-923a8f91-af59-46cb-b266-c9b2053305a2 req-ac266fc3-1354-404f-81d9-40c1d606a0ea service nova] Releasing lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.209211] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834230, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.241685] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834226, 'name': CreateVM_Task, 'duration_secs': 1.446448} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.242250] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.242996] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.243263] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.243603] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1049.243897] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45b51f4f-9683-49bf-9a30-b46168083c0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.249149] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1049.249149] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cc612a-9c0c-ddcf-8e21-54878b8dff3f" [ 1049.249149] 
env[68638]: _type = "Task" [ 1049.249149] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.258607] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cc612a-9c0c-ddcf-8e21-54878b8dff3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.271773] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834228, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.326992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.327339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.327572] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.327762] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.328045] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.330436] env[68638]: INFO nova.compute.manager [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Terminating instance [ 1049.343447] env[68638]: DEBUG oslo_vmware.api [None req-feac569f-888a-4d97-9b32-b780a454c36c 
tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464106} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.343447] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.343654] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.343938] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.344175] env[68638]: INFO nova.compute.manager [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Took 1.45 seconds to destroy the instance on the hypervisor. [ 1049.344398] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.344859] env[68638]: DEBUG nova.compute.manager [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.344962] env[68638]: DEBUG nova.network.neutron [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.371698] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834232, 'name': PowerOffVM_Task, 'duration_secs': 0.345559} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.372040] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.372214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.372477] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-434bd82f-4b5a-4aac-b22d-2d21c920a455 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.398613] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.398855] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.399094] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Deleting the datastore file [datastore2] 2cdcff10-089b-47fd-ba41-2e3a75cd33b0 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.400106] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87d77b67-0bcf-4888-9f41-7c8cd524f7e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.407776] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for the task: (returnval){ [ 1049.407776] env[68638]: value = "task-2834234" [ 1049.407776] env[68638]: _type = "Task" [ 1049.407776] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.417893] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.673335] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1049.711955] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834230, 'name': PowerOffVM_Task, 'duration_secs': 0.545599} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.712260] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.712464] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.712748] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6703e852-a4d5-4d93-8734-40e35a5352a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.734727] env[68638]: DEBUG nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1049.735099] env[68638]: DEBUG nova.compute.provider_tree [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 134 to 135 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1049.735250] env[68638]: DEBUG nova.compute.provider_tree [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1049.762583] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cc612a-9c0c-ddcf-8e21-54878b8dff3f, 'name': SearchDatastore_Task, 'duration_secs': 0.054139} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.763727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.763727] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.763727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.763908] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.764993] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.764993] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-342370c6-a5f1-4c9f-b5f5-2b2fd9e44343 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.778960] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834228, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.279323} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.780342] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1049.780564] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1049.780868] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.781063] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.784844] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a301d22a-9f8d-4f13-8b14-37c2aa566312 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.784844] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-100b3e4d-d164-4862-be7e-769786dfe2d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.787037] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.787248] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.787427] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore2] 43e0eed3-bc25-476d-a9ef-6b132514cf90 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.787683] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c90d25c-4385-4a1e-a386-9522acb371f9 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.794015] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1049.794015] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a69878-e5ce-a3f3-864d-d5f7e752aa27" [ 1049.794015] env[68638]: _type = "Task" [ 1049.794015] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.795938] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1049.795938] env[68638]: value = "task-2834236" [ 1049.795938] env[68638]: _type = "Task" [ 1049.795938] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.804455] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1049.804455] env[68638]: value = "task-2834237" [ 1049.804455] env[68638]: _type = "Task" [ 1049.804455] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.815366] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a69878-e5ce-a3f3-864d-d5f7e752aa27, 'name': SearchDatastore_Task, 'duration_secs': 0.012399} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.815701] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.817165] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c24512a-a181-404a-beb0-d56759136819 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.822752] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.827569] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1049.827569] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52312c3c-1385-0eb0-d1dd-6e14ca3e62d4" [ 1049.827569] env[68638]: _type = "Task" [ 1049.827569] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.843544] env[68638]: DEBUG nova.compute.manager [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.843625] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.843952] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52312c3c-1385-0eb0-d1dd-6e14ca3e62d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.846780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09c311b-50dc-4301-8638-fed4b4f7d121 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.855935] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.857112] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f9a8cb5-955f-40d0-831b-89739e9f1d16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.867300] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1049.867300] env[68638]: value = "task-2834238" [ 1049.867300] env[68638]: _type = "Task" [ 1049.867300] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.880762] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834238, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.894578] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.895058] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8fa9402d-6f87-4f0b-ade8-4361c43b55d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.905051] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1049.905051] env[68638]: value = "task-2834239" [ 1049.905051] env[68638]: _type = "Task" [ 1049.905051] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.923322] env[68638]: DEBUG oslo_vmware.api [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Task: {'id': task-2834234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176172} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.924223] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.924584] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.924865] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.925150] env[68638]: INFO nova.compute.manager [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1049.925513] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.925853] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834239, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.926183] env[68638]: DEBUG nova.compute.manager [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.926328] env[68638]: DEBUG nova.network.neutron [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.947316] env[68638]: DEBUG nova.network.neutron [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1050.194097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.241070] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.881s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.243457] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.898s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.243679] env[68638]: DEBUG nova.objects.instance [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1050.259875] env[68638]: INFO nova.scheduler.client.report [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Deleted allocations for instance dcaef2e3-eb23-4a0b-b617-2880084e03ab [ 1050.312067] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087941} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.312813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.313782] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2293cac-ae9a-4c16-8807-bf7917ce1029 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.319321] env[68638]: DEBUG oslo_vmware.api [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178414} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.319925] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.320127] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.320310] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.320495] env[68638]: INFO nova.compute.manager [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1050.320729] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.320919] env[68638]: DEBUG nova.compute.manager [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.321024] env[68638]: DEBUG nova.network.neutron [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.340357] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.344025] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-886cefca-db5e-4e32-b30d-9e41330a4e76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.359769] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.360030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.360446] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "90c192bd-b823-414c-b793-260eacc9904f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.360446] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.360595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.362440] env[68638]: DEBUG nova.network.neutron [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.363844] env[68638]: INFO nova.compute.manager [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Terminating instance [ 1050.376639] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52312c3c-1385-0eb0-d1dd-6e14ca3e62d4, 'name': SearchDatastore_Task, 'duration_secs': 0.011057} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.378411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.378676] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] cc2e9758-45ee-4e94-ad74-ba7d6c85f06d/cc2e9758-45ee-4e94-ad74-ba7d6c85f06d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.378998] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1050.378998] env[68638]: value = "task-2834240" [ 1050.378998] env[68638]: _type = "Task" [ 1050.378998] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.379550] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1c1f88c-05c0-4b45-add1-37a494f2825e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.391212] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834238, 'name': PowerOffVM_Task, 'duration_secs': 0.301116} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.392093] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.392288] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1050.393054] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-579e19f8-6372-4d9a-ab63-4e4465348624 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.398413] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834240, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.400053] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1050.400053] env[68638]: value = "task-2834241" [ 1050.400053] env[68638]: _type = "Task" [ 1050.400053] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.408702] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834241, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.418017] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834239, 'name': PowerOffVM_Task, 'duration_secs': 0.218874} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.418017] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1050.418429] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca63301-dd21-4cb6-902d-ff1e1abc8f5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.441669] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae5cd2e-7c89-405c-9a6b-3b53993a4396 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.445916] env[68638]: DEBUG nova.compute.manager [req-e6be7f1e-8cd7-4947-82be-e608c5f463d8 req-adfdde51-e9d6-4d19-b154-df15c13ee3a6 service nova] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Received event network-vif-deleted-bc37d458-421e-4ca1-a705-30c976b1fdbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1050.449421] env[68638]: DEBUG nova.network.neutron [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.481025] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1050.481025] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1050.481306] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleting the datastore file [datastore1] 71ec29a8-5e2f-4ccd-9c22-d9721c77622e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1050.482557] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-850cfaf2-ca6c-44d9-96f2-2abfbc53f399 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.493345] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for the task: (returnval){ [ 1050.493345] env[68638]: value = "task-2834243" [ 1050.493345] env[68638]: _type = "Task" [ 1050.493345] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.504317] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.768960] env[68638]: DEBUG oslo_concurrency.lockutils [None req-802fc10c-b301-4605-b1f4-f949a5f98753 tempest-InstanceActionsNegativeTestJSON-1984807474 tempest-InstanceActionsNegativeTestJSON-1984807474-project-member] Lock "dcaef2e3-eb23-4a0b-b617-2880084e03ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.492s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.867024] env[68638]: INFO nova.compute.manager [-] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Took 1.52 seconds to deallocate network for instance. [ 1050.869317] env[68638]: DEBUG nova.compute.manager [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.869502] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.874317] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1d9fd4-bb8d-43ad-addf-98ecadb336c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.883993] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.888375] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-891235c3-0c51-4971-8036-a3d2ebefba9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.897572] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834240, 'name': ReconfigVM_Task, 'duration_secs': 0.288736} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.898576] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af/9ddb29ae-9724-4712-af58-4b8d6546c6af.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1050.902022] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1050.902022] env[68638]: value = "task-2834244" [ 1050.902022] env[68638]: _type = "Task" [ 1050.902022] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.902022] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-498ce437-bc79-4a8d-9e3b-981bf80861b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.914799] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.921380] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834241, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.926052] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1050.926052] env[68638]: value = "task-2834245" [ 1050.926052] env[68638]: _type = "Task" [ 1050.926052] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.936160] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834245, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.952275] env[68638]: INFO nova.compute.manager [-] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Took 1.03 seconds to deallocate network for instance. 
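The PowerOffVM_Task, ReconfigVM_Task, Rename_Task and CopyVirtualDisk_Task entries above all follow the same invoke-then-poll pattern: the driver invokes a vSphere method that immediately returns a Task moref (the "Waiting for the task: (returnval){ value = task-XXXXXXX ... }" blocks), and oslo.vmware then polls that task, logging "progress is N%" until "completed successfully". A minimal sketch of that pattern against the public oslo.vmware session API is given below; it is illustrative only — `session` (assumed to be an oslo_vmware.api.VMwareAPISession) and `vm_ref` (a VirtualMachine managed-object reference) are placeholders, and Nova itself reaches these calls through the vm_util/vmops wrappers named in the log paths rather than this exact code.

    def power_off_vm(session, vm_ref):
        # Invoking the vSphere call returns a Task moref right away; this is
        # the point where a line like "Invoking VirtualMachine.PowerOffVM_Task"
        # is emitted and a task id such as task-2834239 shows up in the log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task at the session's task_poll_interval,
        # producing the "progress is N%" lines, and returns (or raises) once
        # the task finishes -- the "completed successfully" entries above.
        session.wait_for_task(task)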
[ 1050.960821] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1050.961518] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-73b451c5-563c-4623-b81d-fda08af85b66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.974239] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1050.974239] env[68638]: value = "task-2834246" [ 1050.974239] env[68638]: _type = "Task" [ 1050.974239] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.989789] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834246, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.008708] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.087457] env[68638]: DEBUG nova.network.neutron [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.255649] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b48b1b85-bf9f-498f-b377-8bb944900ace tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.257041] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.882s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.257284] env[68638]: DEBUG nova.objects.instance [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1051.378503] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 
tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.417016] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834244, 'name': PowerOffVM_Task, 'duration_secs': 0.318144} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.421220] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.421422] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.421704] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.704266} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.422260] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8ff30d4-f0c3-4f9b-ba32-6d97be1fd85e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.423858] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] cc2e9758-45ee-4e94-ad74-ba7d6c85f06d/cc2e9758-45ee-4e94-ad74-ba7d6c85f06d.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.424079] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.427021] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16d6826f-50ff-49ed-abb0-5f6c8c2f445a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.436029] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1051.436029] env[68638]: value = "task-2834248" [ 1051.436029] env[68638]: _type = "Task" [ 
1051.436029] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.439272] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834245, 'name': Rename_Task, 'duration_secs': 0.237691} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.442385] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1051.442647] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8608219b-4ab9-4ae8-9f07-04b848daaa3d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.452889] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1051.452889] env[68638]: value = "task-2834249" [ 1051.452889] env[68638]: _type = "Task" [ 1051.452889] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.456077] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.460598] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.468363] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.484768] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834246, 'name': CreateSnapshot_Task, 'duration_secs': 0.470455} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.485044] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1051.485834] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae2623c-a155-4b41-a69a-c9d7e1a6c7f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.508122] env[68638]: DEBUG oslo_vmware.api [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Task: {'id': task-2834243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.582543} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.508337] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.508510] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.508704] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.508883] env[68638]: INFO nova.compute.manager [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1051.509572] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.509795] env[68638]: DEBUG nova.compute.manager [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.509901] env[68638]: DEBUG nova.network.neutron [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.515335] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.515918] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.515918] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleting the datastore file [datastore2] 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.516134] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89ed5060-b1f1-4d38-a376-548ac2030640 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.529555] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1051.529555] env[68638]: value = "task-2834250" [ 1051.529555] env[68638]: _type = "Task" [ 1051.529555] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.540613] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834250, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.590710] env[68638]: INFO nova.compute.manager [-] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Took 1.27 seconds to deallocate network for instance. [ 1051.949289] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158941} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.949567] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1051.950449] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9f14a3-c2da-49f8-a135-0c89ad7b7aae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.973602] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] cc2e9758-45ee-4e94-ad74-ba7d6c85f06d/cc2e9758-45ee-4e94-ad74-ba7d6c85f06d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.977605] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c644f1a5-ce81-4b76-8afd-ba81259b6001 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.008885] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1052.008885] env[68638]: DEBUG oslo_vmware.api [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834249, 'name': PowerOnVM_Task, 'duration_secs': 0.475438} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.010148] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0dba9bd3-abb8-482c-a9a0-eb8501a05662 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.013476] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1052.013773] env[68638]: DEBUG nova.compute.manager [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.014200] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1052.014200] env[68638]: value = "task-2834251" [ 1052.014200] env[68638]: _type = "Task" [ 1052.014200] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.015874] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939e2e6d-a556-4bd5-b28c-1a7e149e0537 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.025641] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1052.025641] env[68638]: value = "task-2834252" [ 1052.025641] env[68638]: _type = "Task" [ 1052.025641] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.039405] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.047698] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834252, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.053019] env[68638]: DEBUG oslo_vmware.api [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834250, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15733} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.053294] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1052.053481] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1052.054135] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1052.054337] env[68638]: INFO nova.compute.manager [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1052.054598] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.054794] env[68638]: DEBUG nova.compute.manager [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1052.054914] env[68638]: DEBUG nova.network.neutron [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1052.097508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.266418] env[68638]: DEBUG oslo_concurrency.lockutils [None req-65224784-4518-4b1d-b698-bc4ab3b058da tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.268348] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.850s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.271510] env[68638]: INFO nova.compute.claims [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.339208] env[68638]: DEBUG nova.network.neutron [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.535126] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.541113] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834252, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.558777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.732196] env[68638]: DEBUG nova.compute.manager [req-b9cfad81-a229-4e54-b9d0-20020e44269d req-50bfd495-6de5-4ea8-bd22-1bad81078ccb service nova] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Received event network-vif-deleted-4b1efa68-0e83-4c79-94dc-33b0388cdbc1 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1052.732439] env[68638]: DEBUG nova.compute.manager [req-b9cfad81-a229-4e54-b9d0-20020e44269d req-50bfd495-6de5-4ea8-bd22-1bad81078ccb service nova] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Received event network-vif-deleted-14400668-d5ea-4861-8521-351f3d71704a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1052.843658] env[68638]: INFO nova.compute.manager [-] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Took 1.33 seconds to deallocate network for instance. [ 1053.030264] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834251, 'name': ReconfigVM_Task, 'duration_secs': 0.640085} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.034993] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Reconfigured VM instance instance-00000063 to attach disk [datastore1] cc2e9758-45ee-4e94-ad74-ba7d6c85f06d/cc2e9758-45ee-4e94-ad74-ba7d6c85f06d.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.036454] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d229c006-a4fd-4991-9753-32be48dc3949 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.044806] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834252, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.046166] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1053.046166] env[68638]: value = "task-2834253" [ 1053.046166] env[68638]: _type = "Task" [ 1053.046166] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.060949] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834253, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.313664] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.314017] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.314170] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "9ddb29ae-9724-4712-af58-4b8d6546c6af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.314475] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.314692] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.317374] env[68638]: INFO nova.compute.manager [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Terminating instance [ 1053.352124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.538019] env[68638]: DEBUG nova.network.neutron [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Updating instance_info_cache with network_info: [] 
{{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.544476] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834252, 'name': CloneVM_Task} progress is 95%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.559389] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834253, 'name': Rename_Task, 'duration_secs': 0.202594} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.560457] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.560739] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-220f6cfe-204c-45da-9081-3c2d2c3d85ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.569129] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1053.569129] env[68638]: value = "task-2834254" [ 1053.569129] env[68638]: _type = "Task" [ 1053.569129] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.578295] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834254, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.579989] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147440ff-91a7-468d-b4be-4f14319421b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.590235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b344d17-a5a4-4f46-bf4f-afe1f8a6d7e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.626459] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daac2d21-d337-45c0-95e6-80ab7a87b9cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.635765] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2454060-ddec-4247-a4da-b0e2e701d4b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.657736] env[68638]: DEBUG nova.compute.provider_tree [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.821690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "refresh_cache-9ddb29ae-9724-4712-af58-4b8d6546c6af" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.821879] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquired lock "refresh_cache-9ddb29ae-9724-4712-af58-4b8d6546c6af" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1053.822086] env[68638]: DEBUG nova.network.neutron [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.040828] env[68638]: INFO nova.compute.manager [-] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Took 1.99 seconds to deallocate network for instance. [ 1054.046260] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834252, 'name': CloneVM_Task, 'duration_secs': 1.869641} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.048974] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Created linked-clone VM from snapshot [ 1054.049946] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5701bb-9c82-4aaf-88f6-58c334c2ebd9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.058944] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Uploading image aa7b0f32-eba3-4b43-b887-66db6ba8a8cd {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1054.081764] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834254, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.095970] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1054.095970] env[68638]: value = "vm-570011" [ 1054.095970] env[68638]: _type = "VirtualMachine" [ 1054.095970] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1054.096280] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-536e92d6-9f1d-4dea-82a4-fd669ba0fbdc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.104280] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lease: (returnval){ [ 1054.104280] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0693a-ecb4-d8e0-8b1d-a50e3c3bcf5b" [ 1054.104280] env[68638]: _type = "HttpNfcLease" [ 1054.104280] env[68638]: } obtained for exporting VM: (result){ [ 1054.104280] env[68638]: value = "vm-570011" [ 1054.104280] env[68638]: _type = "VirtualMachine" [ 1054.104280] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1054.104545] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the lease: (returnval){ [ 1054.104545] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0693a-ecb4-d8e0-8b1d-a50e3c3bcf5b" [ 1054.104545] env[68638]: _type = "HttpNfcLease" [ 1054.104545] env[68638]: } to be ready. 
{{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1054.112391] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.112391] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0693a-ecb4-d8e0-8b1d-a50e3c3bcf5b" [ 1054.112391] env[68638]: _type = "HttpNfcLease" [ 1054.112391] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1054.159493] env[68638]: DEBUG nova.scheduler.client.report [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.364055] env[68638]: DEBUG nova.network.neutron [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1054.426291] env[68638]: DEBUG nova.network.neutron [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.551335] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.581226] env[68638]: DEBUG oslo_vmware.api [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834254, 'name': PowerOnVM_Task, 'duration_secs': 0.533781} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.581460] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.581670] env[68638]: INFO nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Took 9.22 seconds to spawn the instance on the hypervisor. 
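The entries above follow oslo.vmware's task-polling pattern: the driver starts a vCenter task (CloneVM_Task, Rename_Task, PowerOnVM_Task), then wait_for_task repeatedly reads the task state, logging "progress is N%" until the task reports success. The sketch below is a minimal, self-contained illustration of that poll-until-done loop; fetch_task_info() is a hypothetical stand-in for the real vSphere task query, which in the log goes through VMwareAPISession and the PropertyCollector rather than anything shown here.

```python
import time

# Hypothetical stand-in for querying a vSphere task; the real driver reads
# the task's "info" property via oslo.vmware, not this fake counter.
def fetch_task_info(task_ref, _state={"progress": 0}):
    _state["progress"] = min(_state["progress"] + 25, 100)
    return {
        "state": "success" if _state["progress"] >= 100 else "running",
        "progress": _state["progress"],
        "error": None,
    }

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the 'progress is N%' log lines."""
    while True:
        info = fetch_task_info(task_ref)
        if info["state"] == "running":
            print(f"Task {task_ref} progress is {info['progress']}%.")
            time.sleep(poll_interval)
            continue
        if info["state"] == "success":
            print(f"Task {task_ref} completed successfully.")
            return info
        raise RuntimeError(info["error"] or "task failed")

wait_for_task("task-2834254")
```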
[ 1054.581847] env[68638]: DEBUG nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.582641] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd17674d-009d-4e8c-aca8-55fa0b037238 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.614241] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1054.614241] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0693a-ecb4-d8e0-8b1d-a50e3c3bcf5b" [ 1054.614241] env[68638]: _type = "HttpNfcLease" [ 1054.614241] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1054.614900] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1054.614900] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0693a-ecb4-d8e0-8b1d-a50e3c3bcf5b" [ 1054.614900] env[68638]: _type = "HttpNfcLease" [ 1054.614900] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1054.615357] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05644e37-b637-44af-a0a5-45508bb0eb27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.624276] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1054.624455] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1054.683573] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.684096] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1054.688466] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.794s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.688668] env[68638]: DEBUG nova.objects.instance [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'pci_requests' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.771593] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-10667151-951a-405e-9eaa-6fcc4b6e73ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.931040] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Releasing lock "refresh_cache-9ddb29ae-9724-4712-af58-4b8d6546c6af" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.931516] env[68638]: DEBUG nova.compute.manager [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1054.931791] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1054.932944] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8f32ce-2225-465a-a912-a9419bf54250 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.940960] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.941238] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf22bec9-dff9-4bcf-9ba2-aef9d56301cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.951155] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1054.951155] env[68638]: value = "task-2834256" [ 1054.951155] env[68638]: _type = "Task" [ 1054.951155] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.959081] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.966054] env[68638]: DEBUG nova.compute.manager [req-cc0564af-3ba1-4d9a-a9bb-9e0e969f0996 req-e51a31e0-8a4f-47ca-95f7-14cc2c14f105 service nova] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Received event network-vif-deleted-d2378ad7-a6bb-4823-9ad4-7ddb4d26a0e3 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1055.107136] env[68638]: INFO nova.compute.manager [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Took 21.42 seconds to build instance. [ 1055.192233] env[68638]: DEBUG nova.compute.utils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.199650] env[68638]: DEBUG nova.objects.instance [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'numa_topology' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.204721] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.204945] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.254370] env[68638]: DEBUG nova.policy [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7569a0fd95c644d38ef18de41870bde4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fdd5447a0546b7b0fe2ed9ea0efc73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1055.415171] env[68638]: DEBUG nova.compute.manager [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Received event network-changed-f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1055.415578] env[68638]: DEBUG nova.compute.manager [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Refreshing instance network info cache due to event network-changed-f197b79d-c838-421c-930b-a1d1d5f1b89c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1055.415924] env[68638]: DEBUG oslo_concurrency.lockutils [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] Acquiring lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.415924] env[68638]: DEBUG oslo_concurrency.lockutils [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] Acquired lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.416090] env[68638]: DEBUG nova.network.neutron [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Refreshing network info cache for port f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1055.468258] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834256, 'name': PowerOffVM_Task, 'duration_secs': 0.185405} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.468258] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.468474] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.468816] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0515e930-3aec-48c8-90a2-8c482c23b6c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.502809] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.503336] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.503999] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Deleting the datastore file [datastore1] 9ddb29ae-9724-4712-af58-4b8d6546c6af {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.503999] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c501be11-7b42-43e3-a62b-e7eb838257d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.516073] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for the task: (returnval){ [ 1055.516073] env[68638]: value = "task-2834258" [ 1055.516073] env[68638]: _type = "Task" [ 1055.516073] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.535191] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834258, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.608344] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34da55aa-0c9f-48ec-be5e-2bab12976bbc tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.951s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.614856] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Successfully created port: dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.706524] env[68638]: INFO nova.compute.claims [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.710644] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.032432] env[68638]: DEBUG oslo_vmware.api [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Task: {'id': task-2834258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194454} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.032770] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.033009] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.033788] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.033788] env[68638]: INFO nova.compute.manager [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Took 1.10 seconds to destroy the instance on the hypervisor. 
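The destroy sequence for instance 9ddb29ae-9724-4712-af58-4b8d6546c6af above runs in a fixed order: power the VM off, unregister it from vCenter, delete its directory from the datastore, then deallocate the instance's network. The sketch below condenses that ordering only; all four helpers are hypothetical stand-ins for the calls traced in the log (PowerOffVM_Task, UnregisterVM, FileManager.DeleteDatastoreFile_Task, and the Neutron deallocation), not Nova's actual destroy path.

```python
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("destroy-sketch")

# Hypothetical helpers standing in for the vSphere and Neutron calls above.
def power_off(vm):
    LOG.debug("Powered off the VM %s", vm)

def unregister(vm):
    LOG.debug("Unregistered the VM %s", vm)

def delete_datastore_files(vm, datastore):
    LOG.debug("Deleted contents of %s from %s", vm, datastore)

def deallocate_network(vm):
    LOG.debug("Deallocated network for %s", vm)

def destroy_instance(vm, datastore="datastore1", destroy_disks=True):
    """Tear an instance down in the same order the log entries above show."""
    power_off(vm)                  # the guest must be powered off first
    unregister(vm)                 # remove the VM from vCenter's inventory
    if destroy_disks:
        delete_datastore_files(vm, datastore)  # drop the instance directory
    deallocate_network(vm)         # release the Neutron ports last

destroy_instance("9ddb29ae-9724-4712-af58-4b8d6546c6af")
```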
[ 1056.033788] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.033929] env[68638]: DEBUG nova.compute.manager [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.034044] env[68638]: DEBUG nova.network.neutron [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.051832] env[68638]: DEBUG nova.network.neutron [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1056.262657] env[68638]: DEBUG nova.network.neutron [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updated VIF entry in instance network info cache for port f197b79d-c838-421c-930b-a1d1d5f1b89c. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1056.263042] env[68638]: DEBUG nova.network.neutron [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updating instance_info_cache with network_info: [{"id": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "address": "fa:16:3e:6e:bd:cb", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf197b79d-c8", "ovs_interfaceid": "f197b79d-c838-421c-930b-a1d1d5f1b89c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.559735] env[68638]: DEBUG nova.network.neutron [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.731800] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Start 
spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1056.762418] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.762669] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.762825] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.763014] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.763167] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.764168] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.764168] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.764168] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.764168] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.764168] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.770361] env[68638]: DEBUG nova.virt.hardware [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.771858] env[68638]: DEBUG oslo_concurrency.lockutils [req-be5aa983-3665-47f1-b65d-9901ace918f5 req-a04a302f-2d5c-4af3-9ce7-a5b09e2ebdc7 service nova] Releasing lock "refresh_cache-cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.772186] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7f6921-24f3-4f5e-b7b9-810f659526f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.788483] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed1a512-910a-4fa5-9811-6d939aa8d065 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.063417] env[68638]: INFO nova.compute.manager [-] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Took 1.03 seconds to deallocate network for instance. 
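The nova.virt.hardware entries above derive a CPU topology for the m1.nano flavor: with 1 vCPU and no flavor or image limits (the 65536 values act as "unlimited"), the only possible topology is 1 socket x 1 core x 1 thread, which is then selected. The sketch below re-derives that "possible topologies" enumeration in simplified form; it assumes the usual factor-the-vCPU-count approach and is not a copy of Nova's _get_possible_cpu_topologies.

```python
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus,
    subject to per-dimension limits (65536 stands for 'unlimited' in the log)."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found

# 1 vCPU with effectively unlimited limits -> only Topology(1, 1, 1), matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.
print(possible_topologies(1))
```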
[ 1057.064066] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010447c6-00ef-459a-9d10-1f0d8a1a4af8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.083190] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c671df-fdf4-4298-9525-a3a1a8831b3f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.125214] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3384b188-7c15-42d9-9566-34dec51d17f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.135363] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e476665f-ddac-4d5a-bc45-f9e386e86ed6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.157402] env[68638]: DEBUG nova.compute.provider_tree [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.206629] env[68638]: DEBUG nova.compute.manager [req-edad7604-69b0-4d7d-979d-686f965c78f2 req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Received event network-vif-plugged-dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1057.206948] env[68638]: DEBUG oslo_concurrency.lockutils [req-edad7604-69b0-4d7d-979d-686f965c78f2 req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] Acquiring lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.207614] env[68638]: DEBUG oslo_concurrency.lockutils [req-edad7604-69b0-4d7d-979d-686f965c78f2 req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.207614] env[68638]: DEBUG oslo_concurrency.lockutils [req-edad7604-69b0-4d7d-979d-686f965c78f2 req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.207614] env[68638]: DEBUG nova.compute.manager [req-edad7604-69b0-4d7d-979d-686f965c78f2 req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] No waiting events found dispatching network-vif-plugged-dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1057.207855] env[68638]: WARNING nova.compute.manager [req-edad7604-69b0-4d7d-979d-686f965c78f2 
req-c59191a5-1b3a-4fc4-8bc5-876c8e975a75 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Received unexpected event network-vif-plugged-dcfef634-2496-4f1b-ae08-cf8895e7d4a7 for instance with vm_state building and task_state spawning. [ 1057.321251] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Successfully updated port: dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.581181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.662580] env[68638]: DEBUG nova.scheduler.client.report [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.823859] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.824028] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.824257] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.167539] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.479s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.170301] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 
tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.588s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.170553] env[68638]: DEBUG nova.objects.instance [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lazy-loading 'resources' on Instance uuid da886efd-bca9-45aa-abcc-13832c66a90c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.178023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.178023] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.202176] env[68638]: INFO nova.network.neutron [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating port d0023f1c-323c-4f1c-a82c-45ad56565341 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1058.355516] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1058.494657] env[68638]: DEBUG nova.network.neutron [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Updating instance_info_cache with network_info: [{"id": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "address": "fa:16:3e:2f:46:f3", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfef634-24", "ovs_interfaceid": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.680489] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1058.936801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6561396-24a1-4aaa-9d19-c27660520ca3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.947490] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb1a140-d69b-46a2-9692-822bc8fc07b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.993979] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3f3935-22d0-4049-a723-20a15e967097 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.999327] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.999863] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance network_info: |[{"id": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "address": "fa:16:3e:2f:46:f3", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfef634-24", "ovs_interfaceid": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1059.000554] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:46:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcfef634-2496-4f1b-ae08-cf8895e7d4a7', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.013443] env[68638]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.014564] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.014919] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec8a1f1f-2141-481b-9b02-3bc1bcfa3a3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.039045] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21969324-395b-4803-b9e0-a6e0ef50cc31 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.045042] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.045042] env[68638]: value = "task-2834260" [ 1059.045042] env[68638]: _type = "Task" [ 1059.045042] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.063550] env[68638]: DEBUG nova.compute.provider_tree [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.075022] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834260, 'name': CreateVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.200270] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.248572] env[68638]: DEBUG nova.compute.manager [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Received event network-changed-dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1059.248777] env[68638]: DEBUG nova.compute.manager [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Refreshing instance network info cache due to event network-changed-dcfef634-2496-4f1b-ae08-cf8895e7d4a7. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1059.248991] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] Acquiring lock "refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.249153] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] Acquired lock "refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.249315] env[68638]: DEBUG nova.network.neutron [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Refreshing network info cache for port dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1059.557457] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834260, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.567672] env[68638]: DEBUG nova.scheduler.client.report [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.834353] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.834659] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.835442] env[68638]: DEBUG nova.network.neutron [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.965786] env[68638]: DEBUG nova.network.neutron [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Updated VIF entry in instance network info cache for port dcfef634-2496-4f1b-ae08-cf8895e7d4a7. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.966087] env[68638]: DEBUG nova.network.neutron [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Updating instance_info_cache with network_info: [{"id": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "address": "fa:16:3e:2f:46:f3", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcfef634-24", "ovs_interfaceid": "dcfef634-2496-4f1b-ae08-cf8895e7d4a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.058491] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834260, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.072590] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.075011] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.293s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.075268] env[68638]: DEBUG nova.objects.instance [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'resources' on Instance uuid 4765bf70-1a72-4102-b5d3-ccedb7c383ea {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.095010] env[68638]: INFO nova.scheduler.client.report [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Deleted allocations for instance da886efd-bca9-45aa-abcc-13832c66a90c [ 1060.468862] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fa141c8-1292-4d22-a1ac-a9fa1c27f55d req-c01317c4-d336-493a-a13f-28a45a2d3935 service nova] Releasing lock 
"refresh_cache-6213446a-f6a4-439b-a1ed-5b8c2234d6ac" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.558638] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834260, 'name': CreateVM_Task, 'duration_secs': 1.29268} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.558817] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1060.559489] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.559589] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.559912] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1060.560207] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d778ccfe-1893-4306-9c21-7717af576868 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.565572] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1060.565572] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52448c36-7432-71a0-8d35-20a61da29ecd" [ 1060.565572] env[68638]: _type = "Task" [ 1060.565572] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.574330] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52448c36-7432-71a0-8d35-20a61da29ecd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.605443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d0e2290e-0e36-4ad9-bd8d-93514fd3333f tempest-AttachInterfacesUnderV243Test-1635474751 tempest-AttachInterfacesUnderV243Test-1635474751-project-member] Lock "da886efd-bca9-45aa-abcc-13832c66a90c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.743s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.619585] env[68638]: DEBUG nova.network.neutron [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.842014] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69501895-e41a-43fa-9513-8f966a2c5fc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.851663] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d41392c-adbf-46f2-8d3b-d92d57a79ecc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.883838] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61990705-6d2d-4759-971c-85bb1537b319 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.892307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cad4c8-65ff-4142-863f-94697f87c3cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.906974] env[68638]: DEBUG nova.compute.provider_tree [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory 
has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.094353] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52448c36-7432-71a0-8d35-20a61da29ecd, 'name': SearchDatastore_Task, 'duration_secs': 0.018576} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.096380] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.096856] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.097167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.097386] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.097647] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.097994] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9082e2f9-af6a-44f8-987f-46f1f95d84f2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.118172] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.118813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1061.121029] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eded6cc-3381-4121-8190-5e1dd07e49d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.125409] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.132155] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1061.132155] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5254c11e-2f60-1c6d-531a-28b86165d2ee" [ 1061.132155] env[68638]: _type = "Task" [ 1061.132155] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.145311] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254c11e-2f60-1c6d-531a-28b86165d2ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.156305] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='7ac1735eaa0111bb84314abe690899ac',container_format='bare',created_at=2025-03-07T02:35:16Z,direct_url=,disk_format='vmdk',id=f510e66f-bc6c-4bb4-af29-0d55b547e445,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1418448223-shelved',owner='ee5d59c43e974d04ba56981f2716ff60',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-03-07T02:35:33Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1061.156697] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.156983] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1061.157256] env[68638]: DEBUG 
nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.157465] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1061.157672] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1061.157964] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1061.158368] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1061.158593] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1061.158837] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1061.159098] env[68638]: DEBUG nova.virt.hardware [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1061.160019] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98288a2c-2599-4d75-b537-c0ce479b3197 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.171055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dddfbab-2bc2-4c0f-9cec-e1b2747c3cc7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.185337] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 
0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:9f:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0023f1c-323c-4f1c-a82c-45ad56565341', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.193500] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.193890] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.194186] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-242b36b1-39ed-48cd-9754-5c31d1faf856 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.217795] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.217795] env[68638]: value = "task-2834262" [ 1061.217795] env[68638]: _type = "Task" [ 1061.217795] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.228283] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834262, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.284953] env[68638]: DEBUG nova.compute.manager [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1061.285621] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.286399] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.286399] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.286556] env[68638]: DEBUG nova.compute.manager [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] No waiting events found dispatching network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1061.286837] env[68638]: WARNING nova.compute.manager [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received unexpected event network-vif-plugged-d0023f1c-323c-4f1c-a82c-45ad56565341 for instance with vm_state shelved_offloaded and task_state spawning. [ 1061.287103] env[68638]: DEBUG nova.compute.manager [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1061.287406] env[68638]: DEBUG nova.compute.manager [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing instance network info cache due to event network-changed-d0023f1c-323c-4f1c-a82c-45ad56565341. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1061.287824] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Acquiring lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.287876] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Acquired lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.288166] env[68638]: DEBUG nova.network.neutron [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Refreshing network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1061.410736] env[68638]: DEBUG nova.scheduler.client.report [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.643655] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254c11e-2f60-1c6d-531a-28b86165d2ee, 'name': SearchDatastore_Task, 'duration_secs': 0.019517} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.644562] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb5bbb4-c3ad-4391-8c1e-8b9d18b7d884 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.650585] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1061.650585] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523f0e1d-e197-f04b-3d5c-b5ff28ae5cd7" [ 1061.650585] env[68638]: _type = "Task" [ 1061.650585] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.660697] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523f0e1d-e197-f04b-3d5c-b5ff28ae5cd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.729364] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834262, 'name': CreateVM_Task, 'duration_secs': 0.416065} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.729614] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.730462] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.730691] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.731078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1061.731393] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-989ea271-8971-4645-be53-f4500cbc1e27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.740345] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1061.740345] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e84675-8b43-4106-7f0e-85fb202b8fac" [ 1061.740345] env[68638]: _type = "Task" [ 1061.740345] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.758984] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.759280] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Processing image f510e66f-bc6c-4bb4-af29-0d55b547e445 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1061.759524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.759670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.759843] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1061.760137] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39754494-8d81-4076-b4e4-df1786c57349 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.769015] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1061.769306] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1061.770165] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5a38701-243b-4b78-9054-eabc1b9ec953 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.776450] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1061.776450] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520c81dd-e036-6cb9-ebb9-821e7e351de5" [ 1061.776450] env[68638]: _type = "Task" [ 1061.776450] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.790565] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520c81dd-e036-6cb9-ebb9-821e7e351de5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.919078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.921502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.727s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.923224] env[68638]: INFO nova.compute.claims [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.946025] env[68638]: INFO nova.scheduler.client.report [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocations for instance 4765bf70-1a72-4102-b5d3-ccedb7c383ea [ 1062.059756] env[68638]: DEBUG nova.network.neutron [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updated VIF entry in instance network info cache for port d0023f1c-323c-4f1c-a82c-45ad56565341. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1062.060276] env[68638]: DEBUG nova.network.neutron [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [{"id": "d0023f1c-323c-4f1c-a82c-45ad56565341", "address": "fa:16:3e:33:9f:b3", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0023f1c-32", "ovs_interfaceid": "d0023f1c-323c-4f1c-a82c-45ad56565341", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.162378] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523f0e1d-e197-f04b-3d5c-b5ff28ae5cd7, 'name': SearchDatastore_Task, 'duration_secs': 0.018608} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.162677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.163114] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1062.163517] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1678503-999d-4ca5-ba40-ee4143035e7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.174393] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1062.174393] env[68638]: value = "task-2834264" [ 1062.174393] env[68638]: _type = "Task" [ 1062.174393] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.189976] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.215196] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1062.216251] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c61f39-89f2-40cd-97db-00b503c5684e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.224015] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk is in state: ready. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1062.224212] env[68638]: ERROR oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk due to incomplete transfer. [ 1062.224452] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cea37357-9a73-498b-95cb-42de5898a8c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.233146] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c6aa7-1a4f-e12e-5aed-ed5ee79f566e/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1062.233391] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Uploaded image aa7b0f32-eba3-4b43-b887-66db6ba8a8cd to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1062.235897] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1062.236289] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8478c91d-4d6b-4617-97c8-2bc7ba300d5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.243052] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1062.243052] env[68638]: value = "task-2834265" [ 1062.243052] env[68638]: _type = "Task" [ 1062.243052] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.252221] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834265, 'name': Destroy_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.290047] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1062.290217] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Fetch image to [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14/OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1062.290461] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Downloading stream optimized image f510e66f-bc6c-4bb4-af29-0d55b547e445 to [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14/OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14.vmdk on the data store datastore2 as vApp {{(pid=68638) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1062.290698] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Downloading image file data f510e66f-bc6c-4bb4-af29-0d55b547e445 to the ESX as VM named 'OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14' {{(pid=68638) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1062.378724] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1062.378724] env[68638]: value = "resgroup-9" [ 1062.378724] env[68638]: _type = "ResourcePool" [ 1062.378724] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1062.379087] env[68638]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2770bf8d-1362-4734-b275-7deb840ac46a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.407406] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease: (returnval){ [ 1062.407406] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1062.407406] env[68638]: _type = "HttpNfcLease" [ 1062.407406] env[68638]: } obtained for vApp import into resource pool (val){ [ 1062.407406] env[68638]: value = "resgroup-9" [ 1062.407406] env[68638]: _type = "ResourcePool" [ 1062.407406] env[68638]: }. 
{{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1062.407707] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the lease: (returnval){ [ 1062.407707] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1062.407707] env[68638]: _type = "HttpNfcLease" [ 1062.407707] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1062.417208] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1062.417208] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1062.417208] env[68638]: _type = "HttpNfcLease" [ 1062.417208] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1062.457029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9893890-988f-40ae-9a7a-79e75ec2d406 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "4765bf70-1a72-4102-b5d3-ccedb7c383ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.669s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.563067] env[68638]: DEBUG oslo_concurrency.lockutils [req-1c867fd9-f6eb-4cfb-afac-68b85bea31c2 req-cb02fbe3-b1b0-4890-94e1-0fe19722cde3 service nova] Releasing lock "refresh_cache-0249ffb9-82ed-44db-bb20-e619eaa176dd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.685789] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834264, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.758313] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834265, 'name': Destroy_Task, 'duration_secs': 0.490724} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.758697] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Destroyed the VM [ 1062.759115] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1062.759429] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9e526233-b22a-44b9-8c38-51156c089491 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.767912] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1062.767912] env[68638]: value = "task-2834267" [ 1062.767912] env[68638]: _type = "Task" [ 1062.767912] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.782500] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834267, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.918424] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1062.918424] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1062.918424] env[68638]: _type = "HttpNfcLease" [ 1062.918424] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1063.176201] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e2b6e6-73c4-4e61-8981-15098a058595 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.189892] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607411} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.190265] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.190488] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.191510] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4293d9-c6a4-4cc9-b883-01b9536dc786 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.194449] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-141cb04a-e0f9-4096-aeef-99d55e831a55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.227520] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1063.227520] env[68638]: value = "task-2834268" [ 1063.227520] env[68638]: _type = "Task" [ 1063.227520] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.228617] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191be528-0382-4827-9ad8-d5d009b91f36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.243621] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8ba96d-810e-4e68-af9e-51713745c500 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.247450] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834268, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.257749] env[68638]: DEBUG nova.compute.provider_tree [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.280045] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834267, 'name': RemoveSnapshot_Task, 'duration_secs': 0.369111} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.280168] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1063.280493] env[68638]: DEBUG nova.compute.manager [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1063.281312] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f561a675-44b0-4091-b5ee-7d315fd0b20d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.416786] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1063.416786] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1063.416786] env[68638]: _type = "HttpNfcLease" [ 1063.416786] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1063.417120] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1063.417120] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528b3527-98b0-a5e9-bcaa-769078e35b22" [ 1063.417120] env[68638]: _type = "HttpNfcLease" [ 1063.417120] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1063.417863] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54172e87-0efb-4f7b-be59-e768242f2574 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.425145] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk from lease info. 
{{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1063.425344] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1063.490321] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6d6861a1-62dd-4b96-95f8-5484c88d0411 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.744746] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834268, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064638} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.745046] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1063.745972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16f4bc1-1ad2-4c8a-b51b-5081a3ab4140 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.762754] env[68638]: DEBUG nova.scheduler.client.report [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.775220] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.780782] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cdf960f-92a4-4940-aba4-edcd07ec43ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.804787] env[68638]: INFO nova.compute.manager [None 
req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Shelve offloading [ 1063.813285] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1063.813285] env[68638]: value = "task-2834270" [ 1063.813285] env[68638]: _type = "Task" [ 1063.813285] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.826625] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834270, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.280510] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.281128] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1064.284123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.906s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.284350] env[68638]: DEBUG nova.objects.instance [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'resources' on Instance uuid 1b176c5d-e77c-410b-b282-b7bba65359a9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.313658] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.314190] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19516f40-1feb-485d-aad8-550ee30f9215 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.327038] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1064.327038] env[68638]: value = "task-2834271" [ 1064.327038] env[68638]: _type = "Task" [ 1064.327038] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.329924] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834270, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.340354] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1064.340592] env[68638]: DEBUG nova.compute.manager [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.341413] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e14c96-272d-4ee7-9a7e-a0ed1a5e1058 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.348348] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.349029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.349029] env[68638]: DEBUG nova.network.neutron [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.580320] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1064.580649] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1064.581640] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a717e8c-b648-4d92-9d03-2c911817a1d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.590825] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1064.591056] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1064.591349] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-68acf6ef-0f4f-4e2b-8f0b-eb4e7b4430a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.787367] env[68638]: DEBUG nova.compute.utils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.789022] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1064.789213] env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.820243] env[68638]: DEBUG oslo_vmware.rw_handles [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d89588-208f-c76a-e26d-a9ca4145a1a2/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1064.820373] env[68638]: INFO nova.virt.vmwareapi.images [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Downloaded image file data f510e66f-bc6c-4bb4-af29-0d55b547e445 [ 1064.821639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507e7117-2dd6-4209-ba16-cefcb4e55628 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.827217] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834270, 'name': ReconfigVM_Task, 'duration_secs': 0.779046} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.829914] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.830743] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd3b7f88-6f58-47e6-98c8-7e322d49f8ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.844295] env[68638]: DEBUG nova.policy [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16c7d60e31234230b817fc1778234251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e7777e8e5d342d68e2f54e23d125314', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.847976] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c50f8b6-4c4d-4b1b-b271-426fc40cbc86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.854625] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1064.854625] env[68638]: value = "task-2834272" [ 1064.854625] env[68638]: _type = "Task" [ 1064.854625] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.876887] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834272, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.884352] env[68638]: INFO nova.virt.vmwareapi.images [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] The imported VM was unregistered [ 1064.886975] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1064.887231] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1064.889988] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe22fd95-9b94-435e-8a23-242a87f7b22c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.904469] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1064.905158] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14/OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14.vmdk to [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk. {{(pid=68638) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1064.905158] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-18f64a2f-0640-443f-ad98-dff15f605259 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.913624] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1064.913624] env[68638]: value = "task-2834274" [ 1064.913624] env[68638]: _type = "Task" [ 1064.913624] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.921546] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.067667] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b1e5e6-a0f3-42b4-96c8-d8191dbaac76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.075479] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eaf4f5-9aad-45b8-a82b-af5eebda6557 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.111386] env[68638]: DEBUG nova.network.neutron [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updating instance_info_cache with network_info: [{"id": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "address": "fa:16:3e:59:19:81", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0117ec5-bf", "ovs_interfaceid": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.113409] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8a9993-d135-4e6a-97b1-2fbf56fb9756 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.122609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c92a337-a8b7-4072-8390-e8046e79fe3b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.139755] env[68638]: DEBUG nova.compute.provider_tree [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.229229] 
env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Successfully created port: acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1065.295452] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1065.365269] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834272, 'name': Rename_Task, 'duration_secs': 0.164398} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.365532] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1065.365798] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfdc0e8a-79f6-4041-9edc-768deea6b176 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.374988] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1065.374988] env[68638]: value = "task-2834275" [ 1065.374988] env[68638]: _type = "Task" [ 1065.374988] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.384123] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.425656] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.617511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.643041] env[68638]: DEBUG nova.scheduler.client.report [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.889094] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834275, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.926742] env[68638]: DEBUG nova.compute.manager [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received event network-vif-unplugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1065.926742] env[68638]: DEBUG oslo_concurrency.lockutils [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.926742] env[68638]: DEBUG oslo_concurrency.lockutils [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.926742] env[68638]: DEBUG oslo_concurrency.lockutils [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.926742] env[68638]: DEBUG nova.compute.manager [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] No waiting events found dispatching network-vif-unplugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} 
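(Illustrative aside, not part of the captured log.) The records above keep repeating one pattern: a VIM method that returns a Task object is invoked, and the session then polls it, producing the "Waiting for the task" and "progress is N%" lines. Below is a minimal, hedged sketch of that pattern using the public oslo.vmware session API; the endpoint, credentials, and vm_ref are placeholders, not values from this deployment.

```python
# Hedged sketch only -- not the Nova driver code. Host, credentials and
# vm_ref are placeholders; vm_ref would normally come from a
# PropertyCollector/RetrievePropertiesEx lookup like the ones logged above.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc1.example.test',              # placeholder vCenter endpoint
    'administrator@vsphere.local',   # placeholder user
    'secret',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,          # polling interval behind "progress is N%"
)

def power_on(vm_ref):
    # Invoke a VIM method that returns a Task managed-object reference ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ... then block until it completes; wait_for_task() polls the task and
    # logs the intermediate progress seen in the records above.
    return session.wait_for_task(task)
```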
[ 1065.926742] env[68638]: WARNING nova.compute.manager [req-96ba0922-0f2d-415c-a0cb-5b1964d908be req-092071f7-5f9d-45c3-8a5e-2a501689db35 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received unexpected event network-vif-unplugged-c0117ec5-bfa5-418d-8a27-8904ffcfadbd for instance with vm_state shelved and task_state shelving_offloading. [ 1065.929503] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.974551] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1065.975684] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5049245d-e827-4aa4-bfbe-7cd5e6664281 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.984550] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.984851] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-408d46ae-c831-4083-a60c-57a6599758dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.084668] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1066.084668] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1066.084925] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore2] 0be6f174-fad2-4ee3-be07-b6190073b40c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.085016] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2aaab2e2-e867-4a87-8591-29aa78f434b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.091930] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] 
Waiting for the task: (returnval){ [ 1066.091930] env[68638]: value = "task-2834278" [ 1066.091930] env[68638]: _type = "Task" [ 1066.091930] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.103265] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.152477] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.155238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.695s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.155555] env[68638]: DEBUG nova.objects.instance [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lazy-loading 'resources' on Instance uuid 2cdcff10-089b-47fd-ba41-2e3a75cd33b0 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.188421] env[68638]: INFO nova.scheduler.client.report [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted allocations for instance 1b176c5d-e77c-410b-b282-b7bba65359a9 [ 1066.307656] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1066.335071] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1066.335373] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1066.335538] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1066.335744] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1066.335917] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1066.336137] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1066.336379] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1066.336539] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1066.336710] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1066.336875] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1066.337070] env[68638]: DEBUG nova.virt.hardware [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1066.338016] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c24ca5-7bc7-47a6-ae02-1bfb2a82d313 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.347250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e82d903-7d62-4a7d-8aaa-8b734cf6c748 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.386037] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834275, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.425381] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.603561] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834278, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.676187] env[68638]: DEBUG nova.compute.manager [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event network-vif-plugged-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1066.676456] env[68638]: DEBUG oslo_concurrency.lockutils [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.676702] env[68638]: DEBUG oslo_concurrency.lockutils [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.676908] env[68638]: DEBUG oslo_concurrency.lockutils [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.677249] env[68638]: DEBUG nova.compute.manager [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] No waiting events found dispatching network-vif-plugged-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.677477] env[68638]: WARNING nova.compute.manager [req-b139bae0-a9c7-4016-a8bf-70a8b68f69a8 req-a0190e0b-8936-48ea-ae9a-755c95f381c3 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received unexpected event network-vif-plugged-acf48d4a-b19e-47d9-a807-d221c4f0fd05 for instance with vm_state building and task_state spawning. [ 1066.701586] env[68638]: DEBUG oslo_concurrency.lockutils [None req-feac569f-888a-4d97-9b32-b780a454c36c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "1b176c5d-e77c-410b-b282-b7bba65359a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.322s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.791663] env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Successfully updated port: acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1066.892015] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834275, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.921824] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184e3a01-6e72-4a0c-a45b-42a1fffaec05 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.937105] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.938458] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33751ec-19ba-4a31-9b94-453cd7ccd054 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.977671] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e181a5-db29-44d9-92a7-87381ded8232 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.987023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe53573-7103-473b-832b-93eedb75d7d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.005547] env[68638]: DEBUG nova.compute.provider_tree [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.109439] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834278, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.294779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.294969] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.295104] env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.389640] env[68638]: DEBUG oslo_vmware.api [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834275, 'name': PowerOnVM_Task, 'duration_secs': 1.629021} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.390186] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.390246] env[68638]: INFO nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1067.390457] env[68638]: DEBUG nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.391502] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff54b6c-4a09-41de-bbc1-22b4c29496bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.428868] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.510467] env[68638]: DEBUG nova.scheduler.client.report [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.605154] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.835139] env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.909628] env[68638]: INFO nova.compute.manager [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Took 21.51 seconds to build instance. [ 1067.929142] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834274, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.797963} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.929142] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14/OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14.vmdk to [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk. 
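(Illustrative aside, not part of the captured log.) The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" records scattered through this stretch come from oslo.concurrency's in-process locks. A hedged sketch of the two forms visible here follows; the lock names mirror the log, but the functions and bodies are placeholders.

```python
# Hedged sketch only -- not the Nova source. Lock names mirror the ones in
# the log ("compute_resources", "refresh_cache-<uuid>"); the functions and
# bodies are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance):
    # Runs only while "compute_resources" is held; the decorator's wrapper
    # emits the "acquired ... waited Ns" / "released ... held Ns" records.
    pass

def refresh_network_cache(instance_uuid):
    # Context-manager form used for the per-instance refresh_cache-<uuid>
    # locks ("Acquiring lock" / "Acquired lock" / "Releasing lock").
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass
```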
[ 1067.929142] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Cleaning up location [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1067.929644] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b0e78e0a-7d0f-4fb2-b368-e53ca1f0ce14 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.929644] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bf3502f-612d-4abf-bead-2590553fbc19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.939623] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1067.939623] env[68638]: value = "task-2834279" [ 1067.939623] env[68638]: _type = "Task" [ 1067.939623] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.948944] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834279, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.963607] env[68638]: DEBUG nova.compute.manager [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Received event network-changed-c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1067.963945] env[68638]: DEBUG nova.compute.manager [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Refreshing instance network info cache due to event network-changed-c0117ec5-bfa5-418d-8a27-8904ffcfadbd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1067.964030] env[68638]: DEBUG oslo_concurrency.lockutils [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] Acquiring lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.964156] env[68638]: DEBUG oslo_concurrency.lockutils [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] Acquired lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.964316] env[68638]: DEBUG nova.network.neutron [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Refreshing network info cache for port c0117ec5-bfa5-418d-8a27-8904ffcfadbd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.017142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.019724] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.922s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.020131] env[68638]: DEBUG nova.objects.instance [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid 43e0eed3-bc25-476d-a9ef-6b132514cf90 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.030935] env[68638]: DEBUG nova.network.neutron [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.048569] env[68638]: INFO nova.scheduler.client.report [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Deleted allocations for instance 2cdcff10-089b-47fd-ba41-2e3a75cd33b0 [ 1068.107497] env[68638]: DEBUG oslo_vmware.api [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.604311} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.107559] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.107844] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.107989] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.126104] env[68638]: INFO nova.scheduler.client.report [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance 0be6f174-fad2-4ee3-be07-b6190073b40c [ 1068.411947] env[68638]: DEBUG oslo_concurrency.lockutils [None req-617cfd22-9d03-42b3-92aa-c4249c36895d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.026s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.460924] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184966} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.462059] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.462059] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.462059] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk to [datastore2] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1068.462317] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba7edc9c-c481-4a60-b1e0-abc2475d61b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.470702] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1068.470702] env[68638]: value = "task-2834280" [ 1068.470702] env[68638]: _type = "Task" [ 1068.470702] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.479774] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.535250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.535679] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Instance network_info: |[{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1068.536301] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:77:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2907cce-d529-4809-af05-d29397bed211', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acf48d4a-b19e-47d9-a807-d221c4f0fd05', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.544107] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.549022] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.549453] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7faac3c-9d0c-416c-9617-848f22b3d666 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.569859] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe3f4e42-8c46-45f5-9765-bc173e39f2c5 tempest-ServerShowV257Test-1388306648 tempest-ServerShowV257Test-1388306648-project-member] Lock "2cdcff10-089b-47fd-ba41-2e3a75cd33b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.315s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.575749] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.575749] env[68638]: value = "task-2834281" [ 1068.575749] env[68638]: _type = "Task" [ 1068.575749] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.591152] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834281, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.631648] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.731639] env[68638]: DEBUG nova.compute.manager [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1068.731841] env[68638]: DEBUG nova.compute.manager [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing instance network info cache due to event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1068.732076] env[68638]: DEBUG oslo_concurrency.lockutils [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.732233] env[68638]: DEBUG oslo_concurrency.lockutils [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.732416] env[68638]: DEBUG nova.network.neutron [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.734694] env[68638]: INFO nova.compute.manager [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Rebuilding instance [ 1068.757609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1d6af5-1da0-4abd-a07e-01788c0721ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.769216] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99aed7d-a12e-43e3-b747-d2e1f2929eee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.780034] env[68638]: DEBUG nova.network.neutron [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updated VIF entry in instance network info cache for port c0117ec5-bfa5-418d-8a27-8904ffcfadbd. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.780034] env[68638]: DEBUG nova.network.neutron [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updating instance_info_cache with network_info: [{"id": "c0117ec5-bfa5-418d-8a27-8904ffcfadbd", "address": "fa:16:3e:59:19:81", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": null, "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc0117ec5-bf", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.819192] env[68638]: DEBUG nova.compute.manager [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.820716] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c6a77b-d3b3-4afb-8d31-00b5c4d04aca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.824883] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe319d1-e525-4bf8-8504-aa3b09d6a511 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.839047] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d47dc4a-7a6a-4fd3-98d8-00227cb8f8dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.857275] env[68638]: DEBUG nova.compute.provider_tree [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.981913] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.087562] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834281, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.276968] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.278444] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.284617] env[68638]: DEBUG oslo_concurrency.lockutils [req-dbe2b54f-a160-410a-8bd4-0ce1b7b0cd8b req-fe21e648-a4b2-4f87-9e16-4f9036078b49 service nova] Releasing lock "refresh_cache-0be6f174-fad2-4ee3-be07-b6190073b40c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.360443] env[68638]: DEBUG nova.scheduler.client.report [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.488205] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.588143] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834281, 'name': CreateVM_Task, 'duration_secs': 0.899168} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.588331] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1069.589058] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.589233] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.589577] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1069.589850] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4155dd4-d545-46f9-a1cc-2044ff043eee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.596148] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1069.596148] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5289caf0-0a4e-2cbb-c191-491cbfeb0c69" [ 1069.596148] env[68638]: _type = "Task" [ 1069.596148] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.606296] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5289caf0-0a4e-2cbb-c191-491cbfeb0c69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.619187] env[68638]: DEBUG nova.network.neutron [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updated VIF entry in instance network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.619597] env[68638]: DEBUG nova.network.neutron [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.781673] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1069.849391] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.849743] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b1a8042-1df8-41a2-a34d-d72a6c27997d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.860798] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1069.860798] env[68638]: value = "task-2834282" [ 1069.860798] env[68638]: _type = "Task" [ 1069.860798] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.872236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.874991] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.876447] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.317s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.876447] env[68638]: DEBUG nova.objects.instance [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1069.897929] env[68638]: INFO nova.scheduler.client.report [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance 43e0eed3-bc25-476d-a9ef-6b132514cf90 [ 1069.985913] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.108841] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5289caf0-0a4e-2cbb-c191-491cbfeb0c69, 'name': SearchDatastore_Task, 'duration_secs': 0.09835} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.109366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.109622] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1070.109870] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.110114] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.110346] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1070.110640] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4dd8413-d3b4-4a64-a1d8-6d656155e40b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.124247] env[68638]: DEBUG oslo_concurrency.lockutils [req-e168baaf-ccac-40b5-8040-8e0a3bec13ed req-1c22bb3a-f5af-4635-aab1-64817f95db31 service nova] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.129323] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1070.129586] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1070.130400] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80c4d34d-e0ab-4d57-b46b-5be8d05b7940 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.137761] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1070.137761] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52423685-b4be-6b70-921c-e736c83199b6" [ 1070.137761] env[68638]: _type = "Task" [ 1070.137761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.147411] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52423685-b4be-6b70-921c-e736c83199b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.195184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.307411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.375014] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834282, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.411690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0a9d3b21-f004-4047-9477-f0b376ed1ced tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "43e0eed3-bc25-476d-a9ef-6b132514cf90" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.245s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.485889] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.651365] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52423685-b4be-6b70-921c-e736c83199b6, 'name': SearchDatastore_Task, 'duration_secs': 0.088198} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.652719] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aecc6c58-922d-4353-93c0-95ba57e0bb2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.661885] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1070.661885] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52971c3b-3a04-0fc1-5351-5d75af8a00bd" [ 1070.661885] env[68638]: _type = "Task" [ 1070.661885] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.671751] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52971c3b-3a04-0fc1-5351-5d75af8a00bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.873538] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834282, 'name': PowerOffVM_Task, 'duration_secs': 0.53623} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.873872] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.874125] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.875055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b63175-8fd2-4ced-94ac-06034689ec09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.883393] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.883680] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47d2e027-f5fd-4461-9c29-cf60d5d9db0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.889848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aded9c2e-0e2f-4294-a49e-9b30220c3b4a tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.891196] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.540s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.891519] env[68638]: DEBUG nova.objects.instance [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lazy-loading 'resources' on Instance uuid 71ec29a8-5e2f-4ccd-9c22-d9721c77622e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.983499] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.983677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Deleting contents of the VM from datastore 
datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.983856] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.984323] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e65f124-2fd7-4c33-862d-26de3f03d67e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.989535] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.996137] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1070.996137] env[68638]: value = "task-2834285" [ 1070.996137] env[68638]: _type = "Task" [ 1070.996137] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.010369] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.172826] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52971c3b-3a04-0fc1-5351-5d75af8a00bd, 'name': SearchDatastore_Task, 'duration_secs': 0.095378} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.173243] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.173549] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ace44b04-6dcf-4845-af4e-b28ddeebe60e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1071.173841] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bbf1444-844a-4eb1-b513-7df98fc12720 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.181066] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1071.181066] env[68638]: value = "task-2834286" [ 1071.181066] env[68638]: _type = "Task" [ 1071.181066] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.189330] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834286, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.494403] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834280, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.537059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.494755] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f510e66f-bc6c-4bb4-af29-0d55b547e445/f510e66f-bc6c-4bb4-af29-0d55b547e445.vmdk to [datastore2] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.495887] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0281ac-b3d5-4cf0-86aa-d9b47ddd5caf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.510177] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164207} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.523496] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.523745] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.523928] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.538276] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.542392] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cd4b8de-a30d-4c4e-b866-9595016f1ae4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.569641] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1071.569641] env[68638]: value = "task-2834287" [ 1071.569641] env[68638]: _type = "Task" [ 1071.569641] env[68638]: } to 
complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.585857] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834287, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.690198] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7b504b-3269-4f64-b4a1-5774ddde2674 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.696112] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834286, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488949} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.696823] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ace44b04-6dcf-4845-af4e-b28ddeebe60e.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1071.697238] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1071.697529] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c002ba91-d7ad-4c8a-bc7a-fa1a176e5946 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.702591] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95aa1a3a-ebc2-4c01-8deb-49f4f83654d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.707478] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1071.707478] env[68638]: value = "task-2834288" [ 1071.707478] env[68638]: _type = "Task" [ 1071.707478] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.738940] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af00b024-909e-4882-91ad-95441e887b0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.744515] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.752943] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0f0b67-b4f2-43c9-85ac-ccbf0c2af044 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.766487] env[68638]: DEBUG nova.compute.provider_tree [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.051771] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.052036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.082774] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834287, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.217848] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078636} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.218175] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1072.218972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82933078-c465-406a-ab77-955eff08ecf9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.248270] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ace44b04-6dcf-4845-af4e-b28ddeebe60e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.248651] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e33f20b-297e-4454-879d-462486d86177 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.270315] env[68638]: DEBUG nova.scheduler.client.report [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1072.273860] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1072.273860] env[68638]: value = "task-2834290" [ 1072.273860] env[68638]: _type = "Task" [ 1072.273860] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.283066] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834290, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.555521] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1072.569250] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.569560] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.569830] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.570149] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.570396] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.570652] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.570938] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.571126] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.571384] env[68638]: 
DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.571697] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.571697] env[68638]: DEBUG nova.virt.hardware [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.573096] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbca030c-99e8-4726-bc29-a4a1acd0a445 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.586352] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834287, 'name': ReconfigVM_Task, 'duration_secs': 0.583298} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.588536] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 0249ffb9-82ed-44db-bb20-e619eaa176dd/0249ffb9-82ed-44db-bb20-e619eaa176dd.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.589919] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'disk_bus': None, 'encryption_secret_uuid': None, 'encryption_format': None, 'encryption_options': None, 'encrypted': False, 'guest_format': None, 'boot_index': 0, 'device_name': '/dev/sda', 'size': 0, 'image_id': 'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'attachment_id': '04151886-8b4d-4fae-846f-77ae87f82b8d', 'device_type': None, 'disk_bus': None, 'delete_on_termination': False, 'guest_format': None, 'boot_index': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570005', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'name': 'volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '0249ffb9-82ed-44db-bb20-e619eaa176dd', 'attached_at': '', 'detached_at': '', 'volume_id': 
'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'serial': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5'}, 'volume_type': None}], 'swap': None} {{(pid=68638) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1072.590355] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1072.590578] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570005', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'name': 'volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '0249ffb9-82ed-44db-bb20-e619eaa176dd', 'attached_at': '', 'detached_at': '', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'serial': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1072.591501] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbb4616-277e-49a7-a8f0-53098f4d8362 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.594952] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f37d4b3e-2875-4567-b2aa-121f267c9a25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.611511] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:46:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcfef634-2496-4f1b-ae08-cf8895e7d4a7', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1072.619778] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1072.631309] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1072.631885] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02305811-0b21-4afe-a9f2-2b61c4956571 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.648215] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c59906-adc1-411d-9246-e1be1666acd9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.674485] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5/volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1072.676084] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a91d3cfc-473d-48eb-bad4-845908152346 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.689168] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1072.689168] env[68638]: value = "task-2834291" [ 1072.689168] env[68638]: _type = "Task" [ 1072.689168] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.704175] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1072.704175] env[68638]: value = "task-2834292" [ 1072.704175] env[68638]: _type = "Task" [ 1072.704175] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.704531] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834291, 'name': CreateVM_Task} progress is 15%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.713609] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834292, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.778932] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.781546] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.230s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.781697] env[68638]: DEBUG nova.objects.instance [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'resources' on Instance uuid 90c192bd-b823-414c-b793-260eacc9904f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.790031] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834290, 'name': ReconfigVM_Task, 'duration_secs': 0.293329} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.790220] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfigured VM instance instance-00000065 to attach disk [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ace44b04-6dcf-4845-af4e-b28ddeebe60e.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.790903] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-052ad52e-abe5-4f18-954a-44c9ad9acb8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.800123] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1072.800123] env[68638]: value = "task-2834293" [ 1072.800123] env[68638]: _type = "Task" [ 1072.800123] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.806549] env[68638]: INFO nova.scheduler.client.report [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Deleted allocations for instance 71ec29a8-5e2f-4ccd-9c22-d9721c77622e [ 1072.811784] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834293, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.075420] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.200708] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834291, 'name': CreateVM_Task, 'duration_secs': 0.38377} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.200974] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.201623] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.201800] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.202152] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.202422] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdd411cc-fae2-4c65-9b6c-fa460c04e7dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.210538] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1073.210538] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5272941a-665d-c89f-ca55-e0025c97e68f" [ 1073.210538] env[68638]: _type = "Task" [ 1073.210538] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.217476] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834292, 'name': ReconfigVM_Task, 'duration_secs': 0.347318} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.218341] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5/volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.229187] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1908a93d-3d76-4e6e-8572-0e652bff4a36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.239811] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5272941a-665d-c89f-ca55-e0025c97e68f, 'name': SearchDatastore_Task, 'duration_secs': 0.011062} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.239811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.239811] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1073.239811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.239981] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.240173] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1073.240788] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-da65e081-ffe4-49ca-bf7a-5125eb7eda67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.247109] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1073.247109] env[68638]: value = "task-2834294" [ 1073.247109] env[68638]: _type = "Task" [ 1073.247109] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.252861] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1073.253076] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1073.256730] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-096d7481-966e-41eb-aa32-95e7f05ffa4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.259394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.259625] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.264257] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834294, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.269514] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1073.269514] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527fedd8-7b89-e512-e7b1-c719d172dff4" [ 1073.269514] env[68638]: _type = "Task" [ 1073.269514] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.284228] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527fedd8-7b89-e512-e7b1-c719d172dff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.312852] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834293, 'name': Rename_Task, 'duration_secs': 0.220311} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.313153] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.313692] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-348ed1d5-811e-4338-89b8-bd1fd848235d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.320232] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74ecfeef-e27c-4c40-8f2b-d1e371d1e226 tempest-ImagesTestJSON-774550192 tempest-ImagesTestJSON-774550192-project-member] Lock "71ec29a8-5e2f-4ccd-9c22-d9721c77622e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.993s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.328665] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1073.328665] env[68638]: value = "task-2834295" [ 1073.328665] env[68638]: _type = "Task" [ 1073.328665] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.341722] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834295, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.514890] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7004450c-66a2-4eeb-a3e6-1c60747a9de5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.523555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6d58a7-fa12-4247-a388-f31b690b58d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.557774] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf95225-6be6-4585-8766-59a130b8b06a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.566312] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6787b258-b96b-4067-abf8-bafa1ef04744 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.580536] env[68638]: DEBUG nova.compute.provider_tree [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.764562] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834294, 'name': ReconfigVM_Task, 'duration_secs': 0.160611} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.764562] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570005', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'name': 'volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '0249ffb9-82ed-44db-bb20-e619eaa176dd', 'attached_at': '', 'detached_at': '', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'serial': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1073.764562] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4526bf92-3b15-44a5-a581-a1dd1ed5f1ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.768913] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1073.777241] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1073.777241] env[68638]: value = "task-2834296" [ 1073.777241] env[68638]: _type = "Task" [ 1073.777241] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.799707] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527fedd8-7b89-e512-e7b1-c719d172dff4, 'name': SearchDatastore_Task, 'duration_secs': 0.012044} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.799707] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834296, 'name': Rename_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.801082] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b682e6b1-65d2-48c0-a9b4-eca1fcbf381e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.810220] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1073.810220] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bee0cd-334c-e6d9-1c00-5844afc83229" [ 1073.810220] env[68638]: _type = "Task" [ 1073.810220] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.820361] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bee0cd-334c-e6d9-1c00-5844afc83229, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.839376] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834295, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.083352] env[68638]: DEBUG nova.scheduler.client.report [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.298797] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834296, 'name': Rename_Task, 'duration_secs': 0.190687} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.299546] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.299826] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-967ff5df-ae51-40f6-b42b-4eeda255494f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.306188] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.309030] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1074.309030] env[68638]: value = "task-2834298" [ 1074.309030] env[68638]: _type = "Task" [ 1074.309030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.330665] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834298, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.331060] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bee0cd-334c-e6d9-1c00-5844afc83229, 'name': SearchDatastore_Task, 'duration_secs': 0.016722} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.338656] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.338944] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1074.339264] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d3096de-8ac2-4687-9879-ddf632d549ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.348479] env[68638]: DEBUG oslo_vmware.api [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834295, 'name': PowerOnVM_Task, 'duration_secs': 0.533192} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.350013] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1074.350319] env[68638]: INFO nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Took 8.04 seconds to spawn the instance on the hypervisor. [ 1074.350547] env[68638]: DEBUG nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.350834] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1074.350834] env[68638]: value = "task-2834299" [ 1074.350834] env[68638]: _type = "Task" [ 1074.350834] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.351646] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cdb1b4-f10d-474c-b1a1-5d9333830762 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.363218] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.589235] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.808s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.592632] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.011s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.592779] env[68638]: DEBUG nova.objects.instance [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lazy-loading 'resources' on Instance uuid 9ddb29ae-9724-4712-af58-4b8d6546c6af {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.624163] env[68638]: INFO nova.scheduler.client.report [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleted allocations for instance 90c192bd-b823-414c-b793-260eacc9904f [ 1074.821803] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834298, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.865229] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834299, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505908} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.865628] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1074.865884] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1074.866171] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b95b1f6-1d05-421f-9c68-6f198989ca86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.879999] env[68638]: INFO nova.compute.manager [None req-553b128a-648b-479b-8be9-51576711bd2e tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Took 24.70 seconds to build instance. [ 1074.882577] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1074.882577] env[68638]: value = "task-2834300" [ 1074.882577] env[68638]: _type = "Task" [ 1074.882577] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.893090] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834300, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.137455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-971cf0dd-99fb-4e24-a57f-506b4db2765d tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "90c192bd-b823-414c-b793-260eacc9904f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.777s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.285729] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e725e20c-4328-48df-8e65-934c521adafc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.294159] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbbb329-c4de-4a6c-8928-4555137d19bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.328156] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb43682-e17f-4785-a254-7f9084192624 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.336369] env[68638]: DEBUG oslo_vmware.api [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834298, 'name': PowerOnVM_Task, 'duration_secs': 0.726201} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.338846] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.341922] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5504829d-8ae2-4295-b9e5-f7ca4b8fd010 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.356424] env[68638]: DEBUG nova.compute.provider_tree [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.369759] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.370331] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.383276] env[68638]: DEBUG oslo_concurrency.lockutils [None req-553b128a-648b-479b-8be9-51576711bd2e 
tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.212s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.393827] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834300, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078819} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.394130] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1075.394940] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1f434e-d8b4-4a40-a8ee-d82f1236ab93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.420811] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.421480] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4e65a0f-0ef9-44d2-a0a1-46c681b4fa0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.453770] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1075.453770] env[68638]: value = "task-2834301" [ 1075.453770] env[68638]: _type = "Task" [ 1075.453770] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.467059] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834301, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.468111] env[68638]: DEBUG nova.compute.manager [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.468957] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9296316-80ef-4204-8a34-d4ca5cb1cb53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.860299] env[68638]: DEBUG nova.scheduler.client.report [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.877163] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.879020] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1075.879020] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1075.969397] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834301, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.989617] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18728adc-466c-4cf7-a494-acfb58bb711c tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 36.447s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.304662] env[68638]: DEBUG nova.compute.manager [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1076.304863] env[68638]: DEBUG nova.compute.manager [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing instance network info cache due to event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1076.309194] env[68638]: DEBUG oslo_concurrency.lockutils [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.309465] env[68638]: DEBUG oslo_concurrency.lockutils [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.309757] env[68638]: DEBUG nova.network.neutron [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1076.368368] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.369729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.170s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.372196] env[68638]: INFO nova.compute.claims [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.382977] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.398485] env[68638]: INFO nova.scheduler.client.report [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Deleted allocations for instance 9ddb29ae-9724-4712-af58-4b8d6546c6af [ 1076.477331] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834301, 'name': ReconfigVM_Task, 'duration_secs': 1.000052} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.477331] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac/6213446a-f6a4-439b-a1ed-5b8c2234d6ac.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.478464] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e504e1d2-f873-4eec-b825-1baf35720242 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.489721] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1076.489721] env[68638]: value = "task-2834302" [ 1076.489721] env[68638]: _type = "Task" [ 1076.489721] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.503624] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834302, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.910250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe01b5d6-c291-4ff1-a06e-6fd2e4d08ab7 tempest-ServerShowV254Test-2089820914 tempest-ServerShowV254Test-2089820914-project-member] Lock "9ddb29ae-9724-4712-af58-4b8d6546c6af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.593s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.007729] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834302, 'name': Rename_Task, 'duration_secs': 0.174488} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.008146] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.008380] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5e48c9b-5805-41a6-b9fc-329834cefa53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.018322] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1077.018322] env[68638]: value = "task-2834303" [ 1077.018322] env[68638]: _type = "Task" [ 1077.018322] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.027879] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.218058] env[68638]: DEBUG nova.network.neutron [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updated VIF entry in instance network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1077.218469] env[68638]: DEBUG nova.network.neutron [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.534202] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834303, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.647028] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeea2bb0-e389-49a0-b410-defa89756d93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.655996] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb1d12f-5c75-4602-b090-9ca8bc982cc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.663612] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.663854] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.694066] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8503bd-f84b-41d1-a775-d5b5ea64fbe3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.704792] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75967094-3508-4198-a714-405e42e1a8aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.723102] env[68638]: DEBUG oslo_concurrency.lockutils [req-c83682fd-6abc-4eea-850e-dbff0be9a5e9 req-dc2fe536-e14d-45e4-9635-711bed0d92a4 service nova] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.723703] env[68638]: DEBUG nova.compute.provider_tree [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.030336] env[68638]: DEBUG oslo_vmware.api [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834303, 'name': PowerOnVM_Task, 'duration_secs': 0.77979} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.031070] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.031070] env[68638]: DEBUG nova.compute.manager [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.031766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024b7d42-24f0-4fec-81bd-b2d11b299fa5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.166581] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1078.227610] env[68638]: DEBUG nova.scheduler.client.report [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.504727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.504867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.553030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.699536] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.732670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.733232] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1078.736250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.105s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.736545] env[68638]: DEBUG nova.objects.instance [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'resources' on Instance uuid 0be6f174-fad2-4ee3-be07-b6190073b40c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.009414] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1079.239594] env[68638]: DEBUG nova.compute.utils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.241047] env[68638]: DEBUG nova.objects.instance [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'numa_topology' on Instance uuid 0be6f174-fad2-4ee3-be07-b6190073b40c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.242326] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.243292] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.298719] env[68638]: DEBUG nova.policy [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fb22fd94276463ebb001ec679a36fec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c238a05699ee42f9a3d69c16f0777ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1079.333441] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.333693] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.333884] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.334252] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.334745] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.337763] env[68638]: INFO 
nova.compute.manager [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Terminating instance [ 1079.399093] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "cb8611f1-d987-43f9-bb4e-4b404c952510" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.399093] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.534731] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.600927] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Successfully created port: ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.742999] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1079.745727] env[68638]: DEBUG nova.objects.base [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Object Instance<0be6f174-fad2-4ee3-be07-b6190073b40c> lazy-loaded attributes: resources,numa_topology {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1079.840029] env[68638]: DEBUG nova.compute.manager [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1079.840208] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1079.843527] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9439063f-b398-49cf-9215-ef9c056dcb6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.855241] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.855516] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.860955] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1079.861226] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e65aef-4cd7-4825-bd65-7ef5370477aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.873523] env[68638]: DEBUG oslo_vmware.api [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1079.873523] env[68638]: value = "task-2834304" [ 1079.873523] env[68638]: _type = "Task" [ 1079.873523] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.884011] env[68638]: DEBUG oslo_vmware.api [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834304, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.901406] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1080.039618] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e071f091-1fb0-45ce-9379-7e14e97099be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.048077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acd18c1-ec13-4726-9ebc-4c37b12516a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.090801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ef9ccc-d6ea-492b-94d9-79a9db27f788 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.099337] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d509346a-50b1-41ac-9bb5-7ace0a1dcfe8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.113801] env[68638]: DEBUG nova.compute.provider_tree [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.362046] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1080.383655] env[68638]: DEBUG oslo_vmware.api [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834304, 'name': PowerOffVM_Task, 'duration_secs': 0.278184} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.383986] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1080.384179] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.384784] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a60d42a2-75ad-458c-b756-d33911c2ba2b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.419949] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.465956] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.466283] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.466477] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore2] 6213446a-f6a4-439b-a1ed-5b8c2234d6ac {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.466746] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd0cd9af-9231-4787-995e-60a3f2331bee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.474137] env[68638]: DEBUG oslo_vmware.api [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1080.474137] env[68638]: value = "task-2834306" [ 1080.474137] env[68638]: _type = "Task" [ 1080.474137] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.483088] env[68638]: DEBUG oslo_vmware.api [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.619024] env[68638]: DEBUG nova.scheduler.client.report [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.757593] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1080.785282] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.785538] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.785694] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.785874] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.786033] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.786230] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.786451] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.786612] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.786779] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.786942] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.787147] env[68638]: DEBUG nova.virt.hardware [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.788057] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0900c3c-d707-4a6d-8955-4465a77f59e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.796565] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b58675-c05b-495e-bf77-65fd9defeb78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.883607] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.986420] env[68638]: DEBUG oslo_vmware.api [None 
req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143152} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.986698] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.986888] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.987093] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1080.987291] env[68638]: INFO nova.compute.manager [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1080.987538] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.987730] env[68638]: DEBUG nova.compute.manager [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1080.987831] env[68638]: DEBUG nova.network.neutron [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1081.122401] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.386s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.124972] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.818s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.126600] env[68638]: INFO nova.compute.claims [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.241091] env[68638]: DEBUG nova.compute.manager [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Received event network-vif-plugged-ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.241692] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.242031] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.242137] env[68638]: DEBUG oslo_concurrency.lockutils [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.243168] env[68638]: DEBUG nova.compute.manager [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] No waiting events found dispatching 
network-vif-plugged-ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.243592] env[68638]: WARNING nova.compute.manager [req-4e20fdde-c86b-4ec9-9c3e-f31657a2313a req-2f4e7775-46a2-4be9-a50a-691f93063ba9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Received unexpected event network-vif-plugged-ed59a3ef-d65c-48e6-9271-4552c024c365 for instance with vm_state building and task_state spawning. [ 1081.281490] env[68638]: DEBUG nova.compute.manager [req-b871ea33-1cd9-4c9f-be12-9149729bd210 req-ec71e612-bf9a-4b24-a972-6563f2c09731 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Received event network-vif-deleted-dcfef634-2496-4f1b-ae08-cf8895e7d4a7 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.281699] env[68638]: INFO nova.compute.manager [req-b871ea33-1cd9-4c9f-be12-9149729bd210 req-ec71e612-bf9a-4b24-a972-6563f2c09731 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Neutron deleted interface dcfef634-2496-4f1b-ae08-cf8895e7d4a7; detaching it from the instance and deleting it from the info cache [ 1081.281873] env[68638]: DEBUG nova.network.neutron [req-b871ea33-1cd9-4c9f-be12-9149729bd210 req-ec71e612-bf9a-4b24-a972-6563f2c09731 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.335836] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Successfully updated port: ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.635133] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6389f2f2-0f31-4085-9e17-5d091f5e333f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 32.753s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.635927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 11.441s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.636180] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.636396] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.636562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.638018] env[68638]: INFO nova.compute.manager [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Terminating instance [ 1081.757869] env[68638]: DEBUG nova.network.neutron [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.785283] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9251cd0e-b4a1-4313-94bd-66da41655e87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.796432] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e86275-5e10-4ea5-a994-8d663034dc65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.828543] env[68638]: DEBUG nova.compute.manager [req-b871ea33-1cd9-4c9f-be12-9149729bd210 req-ec71e612-bf9a-4b24-a972-6563f2c09731 service nova] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Detach interface failed, port_id=dcfef634-2496-4f1b-ae08-cf8895e7d4a7, reason: Instance 6213446a-f6a4-439b-a1ed-5b8c2234d6ac could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1081.838419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.838570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.838720] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.144044] env[68638]: DEBUG nova.compute.manager [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1082.144044] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.144044] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e222b58d-210a-4d8b-9f5d-3577bc85a7eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.153755] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feee73c-17c3-4dad-a8f8-c0e2116d6d3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.189418] env[68638]: WARNING nova.virt.vmwareapi.vmops [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0be6f174-fad2-4ee3-be07-b6190073b40c could not be found. [ 1082.189645] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.189867] env[68638]: INFO nova.compute.manager [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1082.190156] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.193323] env[68638]: DEBUG nova.compute.manager [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1082.193436] env[68638]: DEBUG nova.network.neutron [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1082.261090] env[68638]: INFO nova.compute.manager [-] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Took 1.27 seconds to deallocate network for instance. [ 1082.373055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c752d1-7096-4a04-b380-ae5efb052a47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.381207] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1082.384176] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe0b9f7-07a4-4d6d-abec-347218e2c0ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.417606] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d05603-aa04-45e5-8de5-a41552d7a4b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.428946] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f115501-8f25-4159-a7ba-79b7788dfd9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.443965] env[68638]: DEBUG nova.compute.provider_tree [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.561986] env[68638]: DEBUG nova.network.neutron [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.768069] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.946981] env[68638]: DEBUG nova.scheduler.client.report [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.990871] env[68638]: DEBUG nova.network.neutron [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.065352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.065515] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Instance network_info: |[{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.065933] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:6d:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed59a3ef-d65c-48e6-9271-4552c024c365', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.074944] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating folder: Project (c238a05699ee42f9a3d69c16f0777ae9). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.076166] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd80a385-a834-4f49-bd14-3de686c4e997 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.090460] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created folder: Project (c238a05699ee42f9a3d69c16f0777ae9) in parent group-v569734. [ 1083.090739] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating folder: Instances. Parent ref: group-v570019. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.091049] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78011fa4-1399-405b-80f8-cdca0ec38372 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.101823] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created folder: Instances in parent group-v570019. [ 1083.102078] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.102282] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.102498] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c44a961-3da6-4d0e-b770-e04d1d389817 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.122833] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.122833] env[68638]: value = "task-2834309" [ 1083.122833] env[68638]: _type = "Task" [ 1083.122833] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.131368] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834309, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.271884] env[68638]: DEBUG nova.compute.manager [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Received event network-changed-ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1083.271884] env[68638]: DEBUG nova.compute.manager [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Refreshing instance network info cache due to event network-changed-ed59a3ef-d65c-48e6-9271-4552c024c365. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1083.272333] env[68638]: DEBUG oslo_concurrency.lockutils [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.272538] env[68638]: DEBUG oslo_concurrency.lockutils [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.272673] env[68638]: DEBUG nova.network.neutron [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Refreshing network info cache for port ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.452710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.453369] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.456085] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.381s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.457642] env[68638]: INFO nova.compute.claims [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.494242] env[68638]: INFO nova.compute.manager [-] [instance: 0be6f174-fad2-4ee3-be07-b6190073b40c] Took 1.30 seconds to deallocate network for instance. [ 1083.633665] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834309, 'name': CreateVM_Task, 'duration_secs': 0.448528} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.633797] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.634511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.634719] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.635087] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.635377] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2efef7fb-a70f-4582-8c0f-7535c8866e76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.640199] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1083.640199] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a725c0-e494-d4a6-703c-34ca6f0a6c4c" [ 1083.640199] env[68638]: _type = "Task" [ 1083.640199] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.647976] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a725c0-e494-d4a6-703c-34ca6f0a6c4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.960017] env[68638]: DEBUG nova.network.neutron [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updated VIF entry in instance network info cache for port ed59a3ef-d65c-48e6-9271-4552c024c365. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.960563] env[68638]: DEBUG nova.network.neutron [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.963498] env[68638]: DEBUG nova.compute.utils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1083.966565] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1083.966735] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1084.012472] env[68638]: DEBUG nova.policy [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5fce0bf2fb44b84afd238d875790fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc24eaf6cf74d539558c0a736e18c3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.152146] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a725c0-e494-d4a6-703c-34ca6f0a6c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.013706} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.152333] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.153031] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.153031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.153031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.153031] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.153342] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a1b3a90-c995-47d3-bc49-ffc0c55b8d3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.163459] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.163667] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.164418] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4187e43-4be8-4da1-a979-97ee396749fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.172184] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1084.172184] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525e971f-5558-1789-7f52-ac5d5ddc753d" [ 1084.172184] env[68638]: _type = "Task" [ 1084.172184] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.180761] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525e971f-5558-1789-7f52-ac5d5ddc753d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.281743] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Successfully created port: 2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.463791] env[68638]: DEBUG oslo_concurrency.lockutils [req-86b6f502-2f42-4048-97b0-fd6244412e30 req-c1b3561a-38d9-4dcf-9dbe-9eff7f3d5d25 service nova] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.467740] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1084.523099] env[68638]: DEBUG oslo_concurrency.lockutils [None req-12882aa1-849b-496b-a0ba-761e44ef0f7e tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "0be6f174-fad2-4ee3-be07-b6190073b40c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.887s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.693608] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525e971f-5558-1789-7f52-ac5d5ddc753d, 'name': SearchDatastore_Task, 'duration_secs': 0.011722} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.697778] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3b47514-2781-47bb-b3b3-70e0d3b530ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.705690] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1084.705690] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]524d1c1e-3cf4-15f6-e014-079967e03411" [ 1084.705690] env[68638]: _type = "Task" [ 1084.705690] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.718279] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524d1c1e-3cf4-15f6-e014-079967e03411, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.774487] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a1d09e-526d-4a3a-9fbc-e0adb790491c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.783802] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b721749a-4825-4bcc-a772-eebdd77bb5e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.816236] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9f7d84-5c33-4c39-b6ef-fc4d2fb8745e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.825844] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3f4005-9d7b-448f-9de0-0552087036fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.840760] env[68638]: DEBUG nova.compute.provider_tree [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.217277] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]524d1c1e-3cf4-15f6-e014-079967e03411, 'name': SearchDatastore_Task, 'duration_secs': 0.0108} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.217705] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.217835] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.218138] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a105ed99-66e2-49f6-ae5b-6e41fc434bdc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.227192] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1085.227192] env[68638]: value = "task-2834310" [ 1085.227192] env[68638]: _type = "Task" [ 1085.227192] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.238261] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834310, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.344726] env[68638]: DEBUG nova.scheduler.client.report [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.482525] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1085.516583] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.516821] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.517048] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.517325] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.517485] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.517635] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.517878] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.518326] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.518457] env[68638]: DEBUG 
nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.519174] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.519174] env[68638]: DEBUG nova.virt.hardware [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.519953] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f8881f-e73f-4667-801b-608b840cc15b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.530319] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aef0fb0-9f2d-41f4-897c-397c903b51cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.579132] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "f9bd4416-b2c3-4bdd-9066-08935d304765" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.579827] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.741385] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834310, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513446} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.741795] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.742168] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.742547] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ff5e6a3-5224-40cb-b0be-fd9562a82615 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.750629] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1085.750629] env[68638]: value = "task-2834311" [ 1085.750629] env[68638]: _type = "Task" [ 1085.750629] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.763456] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834311, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.852269] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.396s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.852877] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1085.855573] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.549s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.857181] env[68638]: INFO nova.compute.claims [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.055497] env[68638]: DEBUG nova.compute.manager [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Received event network-vif-plugged-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1086.055767] env[68638]: DEBUG oslo_concurrency.lockutils [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.055849] env[68638]: DEBUG oslo_concurrency.lockutils [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.056022] env[68638]: DEBUG oslo_concurrency.lockutils [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.056235] env[68638]: DEBUG nova.compute.manager [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] No waiting events found dispatching network-vif-plugged-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.056405] env[68638]: WARNING nova.compute.manager [req-73148a06-db58-47f3-97be-ff099a87be95 req-345443cd-1226-46f6-863b-e13b9463b85d service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Received unexpected event network-vif-plugged-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 for instance with vm_state building and task_state spawning. [ 1086.082550] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.139182] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Successfully updated port: 2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.261568] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.333938} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.261915] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.262630] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fa6d83-056e-4f1e-981e-f2879b0ae0d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.290641] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.291314] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c94ef909-6409-41cb-af43-880ff57b0741 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.313064] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1086.313064] env[68638]: value = "task-2834312" [ 1086.313064] env[68638]: _type = "Task" [ 1086.313064] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.999426] env[68638]: DEBUG nova.compute.utils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1087.005021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.005021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.005021] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.007760] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1087.007760] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1087.018394] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834312, 'name': ReconfigVM_Task, 'duration_secs': 0.320997} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.018808] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.019503] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a901b32-a664-4a0d-8b03-19140f5947f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.023891] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.027333] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1087.027333] env[68638]: value = "task-2834313" [ 1087.027333] env[68638]: _type = "Task" [ 1087.027333] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.035756] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834313, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.069618] env[68638]: DEBUG nova.policy [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1087.355508] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Successfully created port: 265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1087.509820] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1087.541930] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834313, 'name': Rename_Task, 'duration_secs': 0.149427} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.542953] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.545022] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.545473] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e009c9aa-3ab5-45df-bb6e-8c991b19eae9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.556718] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1087.556718] env[68638]: value = "task-2834314" [ 1087.556718] env[68638]: _type = "Task" [ 1087.556718] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.572041] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834314, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.713369] env[68638]: DEBUG nova.network.neutron [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating instance_info_cache with network_info: [{"id": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "address": "fa:16:3e:6d:6a:dd", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee11caa-7a", "ovs_interfaceid": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.763264] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06383488-25de-455e-a09f-41e8b03a1192 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.771755] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e26f92d-91fb-4cc9-aa94-b57aebac28f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.804274] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91d8df2-9953-4a81-9df7-644cb3fdf034 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.812692] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217367d5-78c8-47c5-be99-74aea7e6804c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.826279] env[68638]: DEBUG nova.compute.provider_tree [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.069301] env[68638]: DEBUG oslo_vmware.api [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834314, 'name': PowerOnVM_Task, 'duration_secs': 0.490821} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.070230] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.070577] env[68638]: INFO nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1088.070888] env[68638]: DEBUG nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.072522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adc40a7-f978-424b-9efe-e25f8d43b973 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.087746] env[68638]: DEBUG nova.compute.manager [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Received event network-changed-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1088.087929] env[68638]: DEBUG nova.compute.manager [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Refreshing instance network info cache due to event network-changed-2ee11caa-7a55-450e-b8b2-af4bc1c60e64. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1088.088135] env[68638]: DEBUG oslo_concurrency.lockutils [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] Acquiring lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.216624] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.216999] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Instance network_info: |[{"id": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "address": "fa:16:3e:6d:6a:dd", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee11caa-7a", "ovs_interfaceid": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.217373] env[68638]: DEBUG oslo_concurrency.lockutils [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] Acquired lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.217581] env[68638]: DEBUG nova.network.neutron [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Refreshing network info cache for port 2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.218975] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:6a:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'2ee11caa-7a55-450e-b8b2-af4bc1c60e64', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.227371] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.231128] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.231601] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d5b94bc-3800-4125-9087-b9006029017c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.253786] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.253786] env[68638]: value = "task-2834315" [ 1088.253786] env[68638]: _type = "Task" [ 1088.253786] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.262621] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834315, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.329497] env[68638]: DEBUG nova.scheduler.client.report [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.453299] env[68638]: DEBUG nova.network.neutron [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updated VIF entry in instance network info cache for port 2ee11caa-7a55-450e-b8b2-af4bc1c60e64. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1088.453817] env[68638]: DEBUG nova.network.neutron [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating instance_info_cache with network_info: [{"id": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "address": "fa:16:3e:6d:6a:dd", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee11caa-7a", "ovs_interfaceid": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.520739] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1088.550905] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1088.551177] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1088.551362] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1088.551548] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1088.551694] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1088.551836] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1088.552062] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1088.552226] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1088.552394] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1088.552557] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1088.552830] env[68638]: DEBUG nova.virt.hardware [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1088.553860] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db05cb1-f5a5-4a7f-9ade-fe7270e6d881 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.563249] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef7696e-08b5-4439-9469-d31a711bbe4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.596563] env[68638]: INFO nova.compute.manager [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Took 29.41 seconds to build instance. [ 1088.771252] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834315, 'name': CreateVM_Task, 'duration_secs': 0.300783} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.771405] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.772122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.772293] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.772643] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.772928] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbbe103e-4339-471d-93ed-b4aa647a930f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.780721] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1088.780721] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bbaf47-3cd6-59dc-4df1-9ef0e5293e20" [ 1088.780721] env[68638]: _type = "Task" [ 1088.780721] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.794549] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bbaf47-3cd6-59dc-4df1-9ef0e5293e20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.838711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.983s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.839288] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1088.844087] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.461s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.844226] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.844401] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1088.844708] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 10.292s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.844877] env[68638]: DEBUG nova.objects.instance [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1088.848406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4450ee99-a06c-477a-a6c7-12a6f3f3f530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.859014] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f035bd6b-764f-452f-88a8-4a47d3293d8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.875025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30296367-70ce-473d-b721-85a10bab0764 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.883438] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf42da9-78dc-4ef4-8221-3a0aa5fa77fd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.918343] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179232MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1088.918745] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.957626] env[68638]: DEBUG oslo_concurrency.lockutils [req-069bab30-6e64-4713-b993-cf8d1929188f req-dd64bbe2-5422-4d94-a059-e3419e27bc69 service nova] Releasing lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.064121] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Successfully updated port: 265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.098689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b7ea24d2-b52c-4283-98fe-26616867af12 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.922s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.291588] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bbaf47-3cd6-59dc-4df1-9ef0e5293e20, 'name': SearchDatastore_Task, 'duration_secs': 0.013083} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.291866] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.292111] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.292364] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.292501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.292675] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.292925] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-608c26d0-482b-4810-8ab0-cfd5c9e7531a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.302131] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.302253] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.302939] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-117565ad-b98b-432f-bda5-5f7d262f8cc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.309920] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1089.309920] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cbdaa2-29ee-7858-c88e-2fe202e9e00c" [ 1089.309920] env[68638]: _type = "Task" [ 1089.309920] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.317122] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbdaa2-29ee-7858-c88e-2fe202e9e00c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.351544] env[68638]: DEBUG nova.compute.utils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1089.352928] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1089.353130] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1089.391489] env[68638]: DEBUG nova.policy [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '065b6acad70941b0b43b568d1b2f72b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8938cbcafe93492e8f53613d992790bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1089.565026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.565026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.565026] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.665990] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Successfully created port: 2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1089.821162] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbdaa2-29ee-7858-c88e-2fe202e9e00c, 'name': SearchDatastore_Task, 'duration_secs': 0.009894} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.821950] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6b6d18-d6f6-46c8-86a3-08b2ac21b723 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.827390] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1089.827390] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bf75a4-e222-94d6-4575-8c8e3734b893" [ 1089.827390] env[68638]: _type = "Task" [ 1089.827390] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.835629] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bf75a4-e222-94d6-4575-8c8e3734b893, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.853901] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73f2a2cd-d503-428c-85f7-cb90e1ce3958 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.855097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.156s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.856569] env[68638]: INFO nova.compute.claims [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.859467] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1090.096482] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.111839] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Received event network-vif-plugged-265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.112076] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Acquiring lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.112375] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.112614] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.112789] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] No waiting events found dispatching network-vif-plugged-265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.112950] env[68638]: WARNING nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Received unexpected event network-vif-plugged-265a1f2f-5f38-40ef-a00c-d006ca04c011 for instance with vm_state building and task_state spawning. [ 1090.113124] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Received event network-changed-ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.113281] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Refreshing instance network info cache due to event network-changed-ed59a3ef-d65c-48e6-9271-4552c024c365. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1090.113473] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.113607] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.113762] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Refreshing network info cache for port ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.227243] env[68638]: DEBUG nova.network.neutron [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Updating instance_info_cache with network_info: [{"id": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "address": "fa:16:3e:01:17:01", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap265a1f2f-5f", "ovs_interfaceid": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.338920] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bf75a4-e222-94d6-4575-8c8e3734b893, 'name': SearchDatastore_Task, 'duration_secs': 0.009793} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.340028] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.340028] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c66805eb-fd97-4fe3-984d-8759f227d7fc/c66805eb-fd97-4fe3-984d-8759f227d7fc.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.340028] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13ca4845-1aab-48b3-a5c6-f8d380407074 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.347355] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1090.347355] env[68638]: value = "task-2834316" [ 1090.347355] env[68638]: _type = "Task" [ 1090.347355] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.355951] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834316, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.730875] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.731313] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Instance network_info: |[{"id": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "address": "fa:16:3e:01:17:01", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap265a1f2f-5f", "ovs_interfaceid": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1090.731790] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:17:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '265a1f2f-5f38-40ef-a00c-d006ca04c011', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.742095] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.745104] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.745401] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02a45a44-9720-4293-bb57-dd8031aff604 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.767092] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.767092] env[68638]: value = "task-2834317" [ 1090.767092] env[68638]: _type = "Task" [ 1090.767092] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.776327] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834317, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.857858] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834316, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476933} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.860359] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] c66805eb-fd97-4fe3-984d-8759f227d7fc/c66805eb-fd97-4fe3-984d-8759f227d7fc.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.860592] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.860858] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f04e159a-becd-4de0-adcb-73cc2aab0f89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.872934] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1090.876918] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1090.876918] env[68638]: value = "task-2834318" [ 1090.876918] env[68638]: _type = "Task" [ 1090.876918] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.889297] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.901977] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updated VIF entry in instance network info cache for port ed59a3ef-d65c-48e6-9271-4552c024c365. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.902386] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.915512] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 
1090.915876] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1090.916109] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1090.916410] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1090.916628] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1090.916836] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1090.917243] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1090.917472] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1090.918075] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1090.918075] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1090.918075] env[68638]: DEBUG nova.virt.hardware [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1090.919783] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80e8e01-1798-4c2d-b120-fc818210df21 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.933032] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51da52d5-0fea-4379-b1a7-797c038e3d1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.190823] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5119a44c-568c-4637-bc27-2860cf7ee84b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.199724] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afa9dd4-5b34-42c4-b5eb-c83970448c12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.236467] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Successfully updated port: 2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1091.238728] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fff64c9-18c5-47ee-94cd-eb41119c43db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.249563] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bef614-eb90-49a7-876d-1968d835ae70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.264409] env[68638]: DEBUG nova.compute.provider_tree [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.277698] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834317, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.389533] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.281883} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.389817] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.390617] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81c60df-f661-4b0a-96d2-76005fe9608c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.406691] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.406946] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Received event network-changed-265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1091.407131] env[68638]: DEBUG nova.compute.manager [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Refreshing instance network info cache due to event network-changed-265a1f2f-5f38-40ef-a00c-d006ca04c011. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1091.407342] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Acquiring lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.407484] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Acquired lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.407657] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Refreshing network info cache for port 265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1091.417864] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] c66805eb-fd97-4fe3-984d-8759f227d7fc/c66805eb-fd97-4fe3-984d-8759f227d7fc.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.418680] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69723eda-9bc2-4f66-b610-4223f995b78d {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.439469] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1091.439469] env[68638]: value = "task-2834319" [ 1091.439469] env[68638]: _type = "Task" [ 1091.439469] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.455815] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834319, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.739559] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.739559] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.739559] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.767525] env[68638]: DEBUG nova.scheduler.client.report [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.780967] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834317, 'name': CreateVM_Task, 'duration_secs': 0.642842} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.781163] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.781896] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.782020] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.782354] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1091.783182] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0daa867-af77-43e8-a84d-70612dfe8f00 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.788794] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1091.788794] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52684af1-e660-3a02-b47c-ac130526a9b5" [ 1091.788794] env[68638]: _type = "Task" [ 1091.788794] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.797534] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52684af1-e660-3a02-b47c-ac130526a9b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.950690] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834319, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.092516] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Updated VIF entry in instance network info cache for port 265a1f2f-5f38-40ef-a00c-d006ca04c011. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1092.092928] env[68638]: DEBUG nova.network.neutron [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Updating instance_info_cache with network_info: [{"id": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "address": "fa:16:3e:01:17:01", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap265a1f2f-5f", "ovs_interfaceid": "265a1f2f-5f38-40ef-a00c-d006ca04c011", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.189171] env[68638]: DEBUG nova.compute.manager [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.189432] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.189684] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.189844] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.190058] env[68638]: DEBUG nova.compute.manager [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] No waiting events found dispatching network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} 
[ 1092.190247] env[68638]: WARNING nova.compute.manager [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received unexpected event network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c for instance with vm_state building and task_state spawning. [ 1092.190413] env[68638]: DEBUG nova.compute.manager [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.190603] env[68638]: DEBUG nova.compute.manager [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing instance network info cache due to event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1092.190798] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.270259] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1092.272723] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.273225] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1092.276034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.741s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.277664] env[68638]: INFO nova.compute.claims [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.300517] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52684af1-e660-3a02-b47c-ac130526a9b5, 'name': SearchDatastore_Task, 'duration_secs': 0.010716} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.300842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.301086] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.301322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.301467] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.301640] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.301892] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce0d83ca-ffa0-471d-be48-dddc9cd32cb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.311358] env[68638]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.312390] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.312390] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71afb4d7-7460-48e8-91e3-0dd616bd6564 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.317943] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1092.317943] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52dc5b42-dbb8-138d-cef3-a318a3ccb3a0" [ 1092.317943] env[68638]: _type = "Task" [ 1092.317943] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.330484] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52dc5b42-dbb8-138d-cef3-a318a3ccb3a0, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.331242] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13bc4079-6615-4e4a-b21b-e666b533c8f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.336430] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1092.336430] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527d8abe-adab-6d59-7c50-e781b3e6ff68" [ 1092.336430] env[68638]: _type = "Task" [ 1092.336430] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.344459] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d8abe-adab-6d59-7c50-e781b3e6ff68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.425904] env[68638]: DEBUG nova.network.neutron [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.452866] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834319, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.596135] env[68638]: DEBUG oslo_concurrency.lockutils [req-0377d7ba-7656-4916-885b-318e7060bdfc req-695cb415-0f1d-4623-8307-c4cf881732b9 service nova] Releasing lock "refresh_cache-a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.783061] env[68638]: DEBUG nova.compute.utils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1092.786100] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1092.786305] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1092.832861] env[68638]: DEBUG nova.policy [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35503102d9274ae1b18e12a931d5efa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa16293a678b4a35ac0837f6ce904e48', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1092.846364] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d8abe-adab-6d59-7c50-e781b3e6ff68, 'name': SearchDatastore_Task, 'duration_secs': 0.010037} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.846622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.846875] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a8bd64fb-8a07-4edf-a1fb-c2984e4212ec/a8bd64fb-8a07-4edf-a1fb-c2984e4212ec.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.847136] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6ab507c-9635-4810-b150-79d7b1124ad9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.853847] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1092.853847] env[68638]: value = "task-2834320" [ 1092.853847] env[68638]: _type = "Task" [ 1092.853847] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.862692] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834320, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.930471] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.930803] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance network_info: |[{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.931123] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.931309] env[68638]: DEBUG nova.network.neutron [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.932642] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:8a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '2725817f-dd0e-4f09-ba4d-70f48e578f8c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.940398] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating folder: Project (8938cbcafe93492e8f53613d992790bf). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.944753] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42927023-1012-4b9c-a30b-8f7e97158198 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.955634] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834319, 'name': ReconfigVM_Task, 'duration_secs': 1.130408} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.955913] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfigured VM instance instance-00000067 to attach disk [datastore2] c66805eb-fd97-4fe3-984d-8759f227d7fc/c66805eb-fd97-4fe3-984d-8759f227d7fc.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.956609] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fdbd892-ae53-4d8b-bcef-f004249b51c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.959743] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created folder: Project (8938cbcafe93492e8f53613d992790bf) in parent group-v569734. [ 1092.959799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating folder: Instances. Parent ref: group-v570024. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.960398] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-736d1273-7486-4b53-a93c-da5951770ce2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.965110] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1092.965110] env[68638]: value = "task-2834322" [ 1092.965110] env[68638]: _type = "Task" [ 1092.965110] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.972107] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created folder: Instances in parent group-v570024. [ 1092.972634] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.975589] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.975857] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834322, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.976074] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b7e9ccb-ab0a-4d61-a26f-292d887e6842 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.999728] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.999728] env[68638]: value = "task-2834324" [ 1092.999728] env[68638]: _type = "Task" [ 1092.999728] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.009958] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834324, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.172813] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Successfully created port: fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1093.177957] env[68638]: DEBUG nova.network.neutron [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updated VIF entry in instance network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1093.178386] env[68638]: DEBUG nova.network.neutron [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.287842] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1093.365325] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834320, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483335} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.365646] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a8bd64fb-8a07-4edf-a1fb-c2984e4212ec/a8bd64fb-8a07-4edf-a1fb-c2984e4212ec.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.365910] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.366277] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-712a189e-0e51-49bb-bc88-c10a83fa4a0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.377927] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1093.377927] env[68638]: value = "task-2834325" [ 1093.377927] env[68638]: _type = "Task" [ 1093.377927] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.388251] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.480480] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834322, 'name': Rename_Task, 'duration_secs': 0.236238} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.481817] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.482133] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ae15886-70db-4f4f-8a3f-6c1dc61fb057 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.489602] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1093.489602] env[68638]: value = "task-2834326" [ 1093.489602] env[68638]: _type = "Task" [ 1093.489602] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.502432] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.514767] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834324, 'name': CreateVM_Task, 'duration_secs': 0.486388} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.514767] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.515214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.515362] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.515664] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1093.516249] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be554b9-256a-4387-a7a1-3f97029bbe72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.522827] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1093.522827] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b4c02f-1082-a09b-ba5f-c57642bccde8" [ 1093.522827] env[68638]: _type = "Task" [ 1093.522827] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.534768] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b4c02f-1082-a09b-ba5f-c57642bccde8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.572062] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce7e35a-9dc3-4d20-a4f7-b4b92f3e6152 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.580093] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8473255-b66b-4b40-9058-8395ff8aec1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.610028] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edeb724-81a9-4779-9a24-c9c4709f65cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.617736] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0bb3f1-c992-434e-9fd9-750dee37a5dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.632483] env[68638]: DEBUG nova.compute.provider_tree [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.682351] env[68638]: DEBUG oslo_concurrency.lockutils [req-4d8f9361-1a2e-413e-ae50-8b6fc95d97ad req-ff8a22ef-01ee-4467-ad06-0783f4595f3d service nova] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.888037] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10434} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.888388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.889108] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44be5d4-b5bf-496d-89a4-da75cb887ae2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.910941] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] a8bd64fb-8a07-4edf-a1fb-c2984e4212ec/a8bd64fb-8a07-4edf-a1fb-c2984e4212ec.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.911207] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a51d52f0-6b8c-4a59-8bc1-e50433946c1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.932965] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1093.932965] env[68638]: value = "task-2834327" [ 1093.932965] env[68638]: _type = "Task" [ 1093.932965] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.941319] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834327, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.000402] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834326, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.033705] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b4c02f-1082-a09b-ba5f-c57642bccde8, 'name': SearchDatastore_Task, 'duration_secs': 0.011178} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.034063] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.034338] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.034580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.034730] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.034920] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1094.035202] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b695b55a-15b1-4f44-8267-167b1d3b9d99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.045199] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1094.045199] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1094.045990] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dd3b24b-1cde-4bdc-849b-5c73b97ae389 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.052188] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1094.052188] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5259b315-263a-520a-bbd2-63669ddf14e1" [ 1094.052188] env[68638]: _type = "Task" [ 1094.052188] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.061570] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5259b315-263a-520a-bbd2-63669ddf14e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.136572] env[68638]: DEBUG nova.scheduler.client.report [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.302852] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1094.328485] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1094.328705] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None 
req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1094.330023] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1094.330354] env[68638]: DEBUG nova.virt.hardware [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1094.331030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1c3fd6-8f2f-412b-95e3-c43213b81c9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.339689] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004e0615-cd92-488a-b274-5d2b5923ba16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.443097] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834327, 'name': ReconfigVM_Task, 'duration_secs': 0.299705} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.443411] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Reconfigured VM instance instance-00000068 to attach disk [datastore2] a8bd64fb-8a07-4edf-a1fb-c2984e4212ec/a8bd64fb-8a07-4edf-a1fb-c2984e4212ec.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.444066] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbe703de-3cbc-43d7-99df-cbb1e381dd44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.455522] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1094.455522] env[68638]: value = "task-2834328" [ 1094.455522] env[68638]: _type = "Task" [ 1094.455522] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.464289] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834328, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.501653] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834326, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.565890] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5259b315-263a-520a-bbd2-63669ddf14e1, 'name': SearchDatastore_Task, 'duration_secs': 0.010246} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.566902] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83c842c2-aa2c-4b48-b7e6-f67190c6fb4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.573672] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1094.573672] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e47cd0-5bed-0770-48df-2d6f3f7d5387" [ 1094.573672] env[68638]: _type = "Task" [ 1094.573672] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.582545] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e47cd0-5bed-0770-48df-2d6f3f7d5387, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.641692] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.642105] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1094.644891] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.225s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.646337] env[68638]: INFO nova.compute.claims [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.658203] env[68638]: DEBUG nova.compute.manager [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Received event network-vif-plugged-fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1094.658420] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.658630] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.658797] env[68638]: DEBUG oslo_concurrency.lockutils [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.658961] env[68638]: DEBUG nova.compute.manager [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] No waiting events found dispatching network-vif-plugged-fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1094.659136] env[68638]: WARNING nova.compute.manager [req-3a495b07-ccb4-456f-a505-8d434bd43142 req-aa14b946-a469-4696-ab58-0e7280265dc9 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Received unexpected event network-vif-plugged-fe93833c-d268-4ad4-8246-17c09472e5db for instance with vm_state building and task_state spawning. 
[ 1094.736909] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Successfully updated port: fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1094.965671] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834328, 'name': Rename_Task, 'duration_secs': 0.149204} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.966953] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.966953] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f35cb9c2-bd61-4b24-9246-b743c6b68fcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.973989] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1094.973989] env[68638]: value = "task-2834329" [ 1094.973989] env[68638]: _type = "Task" [ 1094.973989] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.981513] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.999307] env[68638]: DEBUG oslo_vmware.api [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834326, 'name': PowerOnVM_Task, 'duration_secs': 1.095295} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.999593] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.999882] env[68638]: INFO nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Took 9.52 seconds to spawn the instance on the hypervisor. 
[ 1095.000088] env[68638]: DEBUG nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1095.000827] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1cb3c9-b145-454b-a829-28900847187a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.084828] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e47cd0-5bed-0770-48df-2d6f3f7d5387, 'name': SearchDatastore_Task, 'duration_secs': 0.013357} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.085238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.085374] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.085613] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7afd816e-9990-4dd4-9201-bc1f280e54d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.093108] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1095.093108] env[68638]: value = "task-2834330" [ 1095.093108] env[68638]: _type = "Task" [ 1095.093108] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.101361] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834330, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.150595] env[68638]: DEBUG nova.compute.utils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.153979] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1095.154245] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1095.193132] env[68638]: DEBUG nova.policy [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '075b1dab9233409390d346c7bbfa3d4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efa342b9d9a34e9e8e708c8f356f905e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1095.239863] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.240103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.240266] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1095.493520] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834329, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.527441] env[68638]: INFO nova.compute.manager [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Took 25.24 seconds to build instance. [ 1095.578580] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Successfully created port: ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.605181] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479212} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.605465] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.605680] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.605951] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b5d6d49-1e64-4db4-9cd4-41f832424294 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.614658] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1095.614658] env[68638]: value = "task-2834331" [ 1095.614658] env[68638]: _type = "Task" [ 1095.614658] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.623825] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834331, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.657913] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1095.801084] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1095.883631] env[68638]: DEBUG nova.compute.manager [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Received event network-changed-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1095.883834] env[68638]: DEBUG nova.compute.manager [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Refreshing instance network info cache due to event network-changed-2ee11caa-7a55-450e-b8b2-af4bc1c60e64. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1095.884065] env[68638]: DEBUG oslo_concurrency.lockutils [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] Acquiring lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.884218] env[68638]: DEBUG oslo_concurrency.lockutils [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] Acquired lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.884407] env[68638]: DEBUG nova.network.neutron [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Refreshing network info cache for port 2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.942163] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbc5d06-3bb4-4bd2-a86d-2e38c6602ae0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.951411] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea1b4c8-b7e0-4aa8-8829-5e65a8c22ae7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.987374] env[68638]: DEBUG nova.network.neutron [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating instance_info_cache with network_info: [{"id": "fe93833c-d268-4ad4-8246-17c09472e5db", "address": "fa:16:3e:aa:eb:f2", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe93833c-d2", "ovs_interfaceid": "fe93833c-d268-4ad4-8246-17c09472e5db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.993332] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0969fbc7-4af5-4250-86c2-ece9d25bff49 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.007745] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44db3a2e-22db-4e4c-8cb4-94acb6ed4160 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.012678] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834329, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.026505] env[68638]: DEBUG nova.compute.provider_tree [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.028014] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d90963e1-e91f-4a5d-9ef2-73d029f8d10b tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.750s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.125260] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834331, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080781} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.125578] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.126415] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff80eaf9-c02a-4411-9af2-94a2b5a78e73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.150779] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.151106] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2556c3ef-53cf-46dd-b602-6673aa30dd9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.169827] env[68638]: INFO nova.virt.block_device [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Booting with volume aba426ca-0b6c-4510-8544-7a9bd4b9af38 at /dev/sda [ 1096.178874] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1096.178874] env[68638]: value = "task-2834332" [ 1096.178874] env[68638]: _type = "Task" [ 1096.178874] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.186205] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834332, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.211176] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10b60b89-c5e8-4f17-a648-f411057204c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.221487] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03651308-05a3-441d-a19f-c348ab4513b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.259254] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9060f860-4d6f-4a5e-b805-f36a6d020c9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.269332] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4fccb5-1525-472a-8429-c61ffd3b71cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.306607] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0c0dca-c346-484f-95b9-123bf8282f93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.313860] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6551de-bd4b-487e-aaf8-cb6717887a49 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.328521] env[68638]: DEBUG nova.virt.block_device [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating existing volume attachment record: aee2b563-4888-420a-8e5d-e1d834378e0b {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1096.502024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.502024] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Instance network_info: |[{"id": "fe93833c-d268-4ad4-8246-17c09472e5db", "address": "fa:16:3e:aa:eb:f2", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe93833c-d2", "ovs_interfaceid": "fe93833c-d268-4ad4-8246-17c09472e5db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1096.502024] env[68638]: DEBUG oslo_vmware.api [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834329, 'name': PowerOnVM_Task, 'duration_secs': 1.148905} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.502024] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:eb:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe93833c-d268-4ad4-8246-17c09472e5db', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1096.512882] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.513149] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1096.513358] env[68638]: INFO nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Took 7.99 seconds to spawn the instance on the hypervisor. 
[ 1096.513538] env[68638]: DEBUG nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1096.516168] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1096.517042] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27cc269-9075-41a2-938d-a93dffb752cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.519498] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-563d48ab-7e1a-4c04-a5ea-37768d225112 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.534250] env[68638]: DEBUG nova.scheduler.client.report [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.545487] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1096.545487] env[68638]: value = "task-2834333" [ 1096.545487] env[68638]: _type = "Task" [ 1096.545487] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.554772] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834333, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.670070] env[68638]: DEBUG nova.network.neutron [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updated VIF entry in instance network info cache for port 2ee11caa-7a55-450e-b8b2-af4bc1c60e64. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1096.670435] env[68638]: DEBUG nova.network.neutron [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating instance_info_cache with network_info: [{"id": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "address": "fa:16:3e:6d:6a:dd", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ee11caa-7a", "ovs_interfaceid": "2ee11caa-7a55-450e-b8b2-af4bc1c60e64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.687888] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834332, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.731471] env[68638]: DEBUG nova.compute.manager [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Received event network-changed-fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1096.731744] env[68638]: DEBUG nova.compute.manager [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Refreshing instance network info cache due to event network-changed-fe93833c-d268-4ad4-8246-17c09472e5db. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1096.731965] env[68638]: DEBUG oslo_concurrency.lockutils [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] Acquiring lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.732169] env[68638]: DEBUG oslo_concurrency.lockutils [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] Acquired lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.732504] env[68638]: DEBUG nova.network.neutron [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Refreshing network info cache for port fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.042106] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.042106] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1097.042777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.159s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.044426] env[68638]: INFO nova.compute.claims [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1097.063373] env[68638]: INFO nova.compute.manager [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Took 24.00 seconds to build instance. [ 1097.069616] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834333, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.166664] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Successfully updated port: ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1097.173008] env[68638]: DEBUG oslo_concurrency.lockutils [req-923dfe47-437f-4f77-9312-e7749da043c0 req-7aa0a65c-8f45-45df-8f86-b127f8d815c9 service nova] Releasing lock "refresh_cache-c66805eb-fd97-4fe3-984d-8759f227d7fc" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.193615] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834332, 'name': ReconfigVM_Task, 'duration_secs': 0.975405} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.194867] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Reconfigured VM instance instance-00000069 to attach disk [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1097.195260] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17af3f7e-6d22-4595-b438-63e23276be16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.204750] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1097.204750] env[68638]: value = "task-2834334" [ 1097.204750] env[68638]: _type = "Task" [ 1097.204750] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.216561] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834334, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.463969] env[68638]: DEBUG nova.network.neutron [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updated VIF entry in instance network info cache for port fe93833c-d268-4ad4-8246-17c09472e5db. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1097.464440] env[68638]: DEBUG nova.network.neutron [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating instance_info_cache with network_info: [{"id": "fe93833c-d268-4ad4-8246-17c09472e5db", "address": "fa:16:3e:aa:eb:f2", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe93833c-d2", "ovs_interfaceid": "fe93833c-d268-4ad4-8246-17c09472e5db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.549901] env[68638]: DEBUG nova.compute.utils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1097.551439] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1097.551673] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.570095] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834333, 'name': CreateVM_Task, 'duration_secs': 0.724325} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.570173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.570740] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6340c717-aff3-4a3d-8702-0af4ebd6cbec tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.519s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.571289] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.571463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.571765] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.572277] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a4177ab-3d5a-46b0-b164-55c6b549a2a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.578081] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1097.578081] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b4935a-1ba5-6cb5-7cf6-765b20a656d9" [ 1097.578081] env[68638]: _type = "Task" [ 1097.578081] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.587619] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b4935a-1ba5-6cb5-7cf6-765b20a656d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.610392] env[68638]: DEBUG nova.policy [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c191f7f68074d789da810c137e06bfc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfc3213aecaa4522abe7e5630c93d5a0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1097.670487] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.670487] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.670487] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.688077] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.688323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.688548] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.688871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock 
"a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.689091] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.691056] env[68638]: INFO nova.compute.manager [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Terminating instance [ 1097.715506] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834334, 'name': Rename_Task, 'duration_secs': 0.30265} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.715775] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.716019] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3704d3b1-293b-4ccd-ad51-f30a7ca3cc55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.724844] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1097.724844] env[68638]: value = "task-2834335" [ 1097.724844] env[68638]: _type = "Task" [ 1097.724844] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.733385] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.903693] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Successfully created port: ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1097.966932] env[68638]: DEBUG oslo_concurrency.lockutils [req-2c6a6855-5f82-4ed5-926b-de06e6e0e89a req-b17129c7-91aa-4752-9876-3199a4998c1b service nova] Releasing lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.054794] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1098.094510] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b4935a-1ba5-6cb5-7cf6-765b20a656d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010517} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.098030] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.098229] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1098.098882] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.098882] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.098882] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 
tempest-AttachVolumeTestJSON-1230075112-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1098.099587] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03b11b1e-6247-43cb-a0b2-0245467e1b9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.109522] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1098.109728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1098.112844] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0740fa8c-0392-4507-926b-536464e5646a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.121348] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1098.121348] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52576038-dea3-3618-ae89-b00a4d4c024c" [ 1098.121348] env[68638]: _type = "Task" [ 1098.121348] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.133056] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52576038-dea3-3618-ae89-b00a4d4c024c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.194981] env[68638]: DEBUG nova.compute.manager [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1098.194981] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1098.195811] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc926716-4b47-4f24-a368-c46ae0eb06b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.201743] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.208034] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1098.208304] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb43b80e-f105-465f-af0d-c9502f987cda {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.216431] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1098.216431] env[68638]: value = "task-2834336" [ 1098.216431] env[68638]: _type = "Task" [ 1098.216431] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.229834] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.239883] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834335, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.323816] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37397ba1-a6a9-4ea2-a5b8-f4da36233063 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.333414] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f095e-9530-411d-8bb7-fd485a68a573 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.369062] env[68638]: DEBUG nova.network.neutron [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.370963] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadb27de-5f2c-4df9-9832-5104c8a16eb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.383420] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04e2f63-761e-4d8b-a4ab-4c494a10f304 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.400401] env[68638]: DEBUG nova.compute.provider_tree [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.415871] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.415871] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.416130] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.416168] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.416420] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.416652] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.416718] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.416926] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.417106] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.417286] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.417479] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.417625] env[68638]: DEBUG nova.virt.hardware [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.419009] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc88159c-8e07-40c3-8ff7-86db07fbc5dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.427925] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedf5774-5096-4c76-93dc-425c91cae09e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.632990] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52576038-dea3-3618-ae89-b00a4d4c024c, 'name': SearchDatastore_Task, 'duration_secs': 0.010875} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.633816] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aba77bb5-391f-4369-a52f-3c6c8c45fac3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.639452] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1098.639452] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520ae17a-e912-0f52-188a-aa7a086da585" [ 1098.639452] env[68638]: _type = "Task" [ 1098.639452] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.647281] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ae17a-e912-0f52-188a-aa7a086da585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.727874] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834336, 'name': PowerOffVM_Task, 'duration_secs': 0.210902} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.728236] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.728412] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1098.728677] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-725887fd-0d58-4b92-a925-c295bc16c501 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.738684] env[68638]: DEBUG oslo_vmware.api [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834335, 'name': PowerOnVM_Task, 'duration_secs': 0.848485} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.738930] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1098.739142] env[68638]: INFO nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Took 7.87 seconds to spawn the instance on the hypervisor. 
[ 1098.739319] env[68638]: DEBUG nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1098.740046] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0581f610-0cc6-459f-92f4-c4743cfbef7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.759019] env[68638]: DEBUG nova.compute.manager [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Received event network-vif-plugged-ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1098.759339] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.759621] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.759818] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.759990] env[68638]: DEBUG nova.compute.manager [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] No waiting events found dispatching network-vif-plugged-ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1098.761029] env[68638]: WARNING nova.compute.manager [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Received unexpected event network-vif-plugged-ef048785-d375-47e3-9f3c-2f26fd1bb175 for instance with vm_state building and task_state spawning. 
[ 1098.761029] env[68638]: DEBUG nova.compute.manager [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Received event network-changed-ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1098.761029] env[68638]: DEBUG nova.compute.manager [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Refreshing instance network info cache due to event network-changed-ef048785-d375-47e3-9f3c-2f26fd1bb175. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1098.761029] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.802561] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1098.802795] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1098.803054] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore2] a8bd64fb-8a07-4edf-a1fb-c2984e4212ec {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1098.803401] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-840bd05a-4305-44ed-be33-07f2f054e79c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.809989] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1098.809989] env[68638]: value = "task-2834338" [ 1098.809989] env[68638]: _type = "Task" [ 1098.809989] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.819560] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.875236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.875679] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Instance network_info: |[{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.876068] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.876361] env[68638]: DEBUG nova.network.neutron [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Refreshing network info cache for port ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.877764] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:11:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69cfa7ba-6989-4d75-9495-97b5fea00c3c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef048785-d375-47e3-9f3c-2f26fd1bb175', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.885643] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 
tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.888666] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.889181] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f390445-42e3-4dcf-86a1-3fe5b4f4b895 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.905043] env[68638]: DEBUG nova.scheduler.client.report [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.915082] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.915082] env[68638]: value = "task-2834339" [ 1098.915082] env[68638]: _type = "Task" [ 1098.915082] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.925139] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834339, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.065034] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1099.097233] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.097233] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.097233] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.097233] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.097450] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.097450] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.097941] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.097941] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.098137] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.098685] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.098685] env[68638]: DEBUG nova.virt.hardware [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.099337] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e8130a-28b2-4715-87e1-e3c53b72d4f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.108729] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601bdf7f-082a-40d4-bd15-da4340538cf5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.150261] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520ae17a-e912-0f52-188a-aa7a086da585, 'name': SearchDatastore_Task, 'duration_secs': 0.011534} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.150543] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.150825] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1/0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1099.151131] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a9f7273-14a9-4a71-b621-7a7feb69f102 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.158883] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1099.158883] env[68638]: value = "task-2834340" [ 1099.158883] env[68638]: _type = "Task" [ 1099.158883] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.168898] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834340, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.178087] env[68638]: DEBUG nova.network.neutron [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updated VIF entry in instance network info cache for port ef048785-d375-47e3-9f3c-2f26fd1bb175. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1099.178619] env[68638]: DEBUG nova.network.neutron [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.259742] env[68638]: INFO nova.compute.manager [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Took 24.98 seconds to build instance. [ 1099.320756] env[68638]: DEBUG oslo_vmware.api [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164825} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.320974] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1099.321185] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1099.321412] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1099.321590] env[68638]: INFO nova.compute.manager [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1099.321844] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.322078] env[68638]: DEBUG nova.compute.manager [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1099.322186] env[68638]: DEBUG nova.network.neutron [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1099.410356] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.411054] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1099.414453] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.647s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.414764] env[68638]: DEBUG nova.objects.instance [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'resources' on Instance uuid 6213446a-f6a4-439b-a1ed-5b8c2234d6ac {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.435377] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Successfully updated port: ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1099.440518] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834339, 'name': CreateVM_Task, 'duration_secs': 0.378435} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.440971] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1099.441713] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': 'aee2b563-4888-420a-8e5d-e1d834378e0b', 'device_type': None, 'disk_bus': None, 'delete_on_termination': True, 'guest_format': None, 'boot_index': 0, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570016', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'name': 'volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0903192-4fa7-437a-9023-33e8e65124e3', 'attached_at': '', 'detached_at': '', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'serial': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38'}, 'volume_type': None}], 'swap': None} {{(pid=68638) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1099.442303] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Root volume attach. 
Driver type: vmdk {{(pid=68638) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1099.443356] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e843c8-db4b-4b1a-a569-6cee260403b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.455809] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96522622-df1d-4cec-9c44-fc554dc19414 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.466784] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a7784a-cb8a-4c15-a08e-f0c072789fcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.477265] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-470cf7b1-43f4-4d8d-9bbc-d4a628cf19fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.488059] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1099.488059] env[68638]: value = "task-2834341" [ 1099.488059] env[68638]: _type = "Task" [ 1099.488059] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.499985] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.675018] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834340, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507187} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.675146] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1/0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1099.675695] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1099.675695] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb0aa800-733c-4be5-8e02-76b65598be3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.681360] env[68638]: DEBUG oslo_concurrency.lockutils [req-98c4d7d2-2f9e-4de7-b2aa-746848b60cc4 req-6218d2d5-50b1-4230-8400-38e3dc392b12 service nova] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.683329] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1099.683329] env[68638]: value = "task-2834342" [ 1099.683329] env[68638]: _type = "Task" [ 1099.683329] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.694484] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834342, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.702346] env[68638]: DEBUG nova.compute.manager [req-c63147ea-323e-47bf-8fe6-da6e4a86a30b req-73061edb-0cff-4a38-b9bb-3f441b50df78 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Received event network-vif-deleted-265a1f2f-5f38-40ef-a00c-d006ca04c011 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1099.702540] env[68638]: INFO nova.compute.manager [req-c63147ea-323e-47bf-8fe6-da6e4a86a30b req-73061edb-0cff-4a38-b9bb-3f441b50df78 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Neutron deleted interface 265a1f2f-5f38-40ef-a00c-d006ca04c011; detaching it from the instance and deleting it from the info cache [ 1099.703585] env[68638]: DEBUG nova.network.neutron [req-c63147ea-323e-47bf-8fe6-da6e4a86a30b req-73061edb-0cff-4a38-b9bb-3f441b50df78 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.762242] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4fd4fc4e-e52c-4431-8856-e5cd6e35b805 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.502s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.930682] env[68638]: DEBUG nova.compute.utils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.938747] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1099.939012] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1099.941398] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.941530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquired lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.941668] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.980051] env[68638]: DEBUG nova.policy [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7569a0fd95c644d38ef18de41870bde4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fdd5447a0546b7b0fe2ed9ea0efc73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1100.000501] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 40%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.157915] env[68638]: DEBUG nova.network.neutron [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.160304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "4a0c0188-69bb-441e-a930-ab20be5b2319" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.160304] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.195464] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071926} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.196019] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1100.197176] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d307aaa-f501-4741-bb00-c1a1f21d2d78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.220432] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7f33cd2-67df-45f9-be32-7f838d171e1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.233502] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1/0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1100.237298] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cea4aba-2100-4f6f-8800-c1e380241fe6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.263690] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0e7d8b-276f-44bd-ac86-56532d0ad954 
{{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.278701] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1100.278701] env[68638]: value = "task-2834343" [ 1100.278701] env[68638]: _type = "Task" [ 1100.278701] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.279466] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148cd690-bc0b-4b4e-a68d-f0fa188e4020 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.293216] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834343, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.294407] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35631c11-eeef-480e-9cc7-e92f278c0912 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.311733] env[68638]: DEBUG nova.compute.manager [req-c63147ea-323e-47bf-8fe6-da6e4a86a30b req-73061edb-0cff-4a38-b9bb-3f441b50df78 service nova] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Detach interface failed, port_id=265a1f2f-5f38-40ef-a00c-d006ca04c011, reason: Instance a8bd64fb-8a07-4edf-a1fb-c2984e4212ec could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1100.341592] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Successfully created port: 22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1100.344161] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042d9078-f84d-4d87-b606-230575bd8a87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.353310] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31200ca0-ed81-4367-967e-b2953eb89897 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.370405] env[68638]: DEBUG nova.compute.provider_tree [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.439026] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1100.477991] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1100.499703] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 53%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.640026] env[68638]: DEBUG nova.network.neutron [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Updating instance_info_cache with network_info: [{"id": "ddf9710f-5767-4215-876a-d304d09d0b36", "address": "fa:16:3e:03:4b:b0", "network": {"id": "ab4eed24-61b2-41c2-b2a6-457c3f834c5a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-524149289-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfc3213aecaa4522abe7e5630c93d5a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddf9710f-57", "ovs_interfaceid": "ddf9710f-5767-4215-876a-d304d09d0b36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.663049] env[68638]: INFO nova.compute.manager [-] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Took 1.34 seconds to deallocate network for instance. [ 1100.663049] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.793982] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834343, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.851337] env[68638]: DEBUG nova.compute.manager [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Received event network-vif-plugged-ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1100.851337] env[68638]: DEBUG oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Acquiring lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.851337] env[68638]: DEBUG oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.851337] env[68638]: DEBUG oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.851554] env[68638]: DEBUG nova.compute.manager [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] No waiting events found dispatching network-vif-plugged-ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1100.851647] env[68638]: WARNING nova.compute.manager [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Received unexpected event network-vif-plugged-ddf9710f-5767-4215-876a-d304d09d0b36 for instance with vm_state building and task_state spawning. [ 1100.851802] env[68638]: DEBUG nova.compute.manager [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Received event network-changed-ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1100.851952] env[68638]: DEBUG nova.compute.manager [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Refreshing instance network info cache due to event network-changed-ddf9710f-5767-4215-876a-d304d09d0b36. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1100.852139] env[68638]: DEBUG oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Acquiring lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.873792] env[68638]: DEBUG nova.scheduler.client.report [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.000943] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 67%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.144146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Releasing lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.144645] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Instance network_info: |[{"id": "ddf9710f-5767-4215-876a-d304d09d0b36", "address": "fa:16:3e:03:4b:b0", "network": {"id": "ab4eed24-61b2-41c2-b2a6-457c3f834c5a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-524149289-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfc3213aecaa4522abe7e5630c93d5a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddf9710f-57", "ovs_interfaceid": "ddf9710f-5767-4215-876a-d304d09d0b36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1101.145038] env[68638]: DEBUG 
oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Acquired lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.145331] env[68638]: DEBUG nova.network.neutron [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Refreshing network info cache for port ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1101.146882] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:4b:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddf9710f-5767-4215-876a-d304d09d0b36', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1101.156371] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Creating folder: Project (cfc3213aecaa4522abe7e5630c93d5a0). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1101.160176] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b089b2de-56c4-40ae-b501-2d58a2c61979 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.174577] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.177776] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Created folder: Project (cfc3213aecaa4522abe7e5630c93d5a0) in parent group-v569734. [ 1101.177984] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Creating folder: Instances. Parent ref: group-v570029. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1101.178284] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3d14ec9-c585-4317-87c2-097510270934 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.189794] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.197734] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Created folder: Instances in parent group-v570029. [ 1101.198043] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1101.198265] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1101.198482] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c49d282-7dea-4ced-9d04-2200270cf5e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.220602] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1101.220602] env[68638]: value = "task-2834346" [ 1101.220602] env[68638]: _type = "Task" [ 1101.220602] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.231047] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834346, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.296352] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834343, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.378561] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.385598] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.362s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.387284] env[68638]: INFO nova.compute.claims [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.424580] env[68638]: INFO nova.scheduler.client.report [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocations for instance 6213446a-f6a4-439b-a1ed-5b8c2234d6ac [ 1101.447432] env[68638]: DEBUG nova.network.neutron [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Updated VIF entry in instance network info cache for port ddf9710f-5767-4215-876a-d304d09d0b36. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1101.447954] env[68638]: DEBUG nova.network.neutron [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Updating instance_info_cache with network_info: [{"id": "ddf9710f-5767-4215-876a-d304d09d0b36", "address": "fa:16:3e:03:4b:b0", "network": {"id": "ab4eed24-61b2-41c2-b2a6-457c3f834c5a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-524149289-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfc3213aecaa4522abe7e5630c93d5a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddf9710f-57", "ovs_interfaceid": "ddf9710f-5767-4215-876a-d304d09d0b36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.452040] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1101.482539] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.482800] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.483019] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.483223] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.483386] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.483549] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.483765] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.483936] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.484189] env[68638]: DEBUG 
nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.484424] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.484617] env[68638]: DEBUG nova.virt.hardware [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.485865] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efff261f-38a4-4544-8dae-58fa4ee604f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.499031] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c72a5d-2928-4fa9-90cd-3f721068d848 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.507507] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 81%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.734984] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834346, 'name': CreateVM_Task, 'duration_secs': 0.511694} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.735253] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.735950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.736199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.736613] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1101.736878] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1344c635-1b02-43b2-9e99-38d05bac2c19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.743224] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1101.743224] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4bb9a-efac-54cf-5aff-69274213c418" [ 1101.743224] env[68638]: _type = "Task" [ 1101.743224] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.753752] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f4bb9a-efac-54cf-5aff-69274213c418, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.771954] env[68638]: DEBUG nova.compute.manager [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Received event network-vif-plugged-22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1101.772261] env[68638]: DEBUG oslo_concurrency.lockutils [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] Acquiring lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.772469] env[68638]: DEBUG oslo_concurrency.lockutils [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.772637] env[68638]: DEBUG oslo_concurrency.lockutils [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.772802] env[68638]: DEBUG nova.compute.manager [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] No waiting events found dispatching network-vif-plugged-22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1101.772962] env[68638]: WARNING nova.compute.manager [req-0e78b2f3-130a-48ca-b7f6-a3f34f02e8dd req-1e5468de-a478-4521-8915-41135df4ffa9 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Received unexpected event network-vif-plugged-22c8d069-e6d1-4644-89d8-516903e4ef3d for instance with vm_state building and task_state spawning. [ 1101.796084] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834343, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.900243] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Successfully updated port: 22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1101.933043] env[68638]: DEBUG oslo_concurrency.lockutils [None req-951bea0b-c231-4ab4-adce-17831d63b4cd tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "6213446a-f6a4-439b-a1ed-5b8c2234d6ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.599s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.950354] env[68638]: DEBUG oslo_concurrency.lockutils [req-043181e9-643e-4efd-a53e-56dde701e14c req-f3a03607-fa3f-4a91-8e74-ae7eeaafbb5f service nova] Releasing lock "refresh_cache-cb8611f1-d987-43f9-bb4e-4b404c952510" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.002569] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 92%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.254995] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f4bb9a-efac-54cf-5aff-69274213c418, 'name': SearchDatastore_Task, 'duration_secs': 0.012773} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.255502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.255627] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1102.255762] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.255909] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.256097] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1102.256400] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f228b4d-3e67-477f-86a1-ca68a12227b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.265771] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1102.266013] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1102.266707] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c8fdae6-b772-4181-a3df-a8f626a36d26 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.272054] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1102.272054] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527c7d8c-9b2d-8b3f-52a8-9244369f6800" [ 1102.272054] env[68638]: _type = "Task" [ 1102.272054] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.279909] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c7d8c-9b2d-8b3f-52a8-9244369f6800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.293769] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834343, 'name': ReconfigVM_Task, 'duration_secs': 1.909103} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.294479] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1/0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.295143] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9bdc56c-52d2-4bfe-a5b6-d6d69bd2d947 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.302222] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1102.302222] env[68638]: value = "task-2834347" [ 1102.302222] env[68638]: _type = "Task" [ 1102.302222] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.310302] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834347, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.401507] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.401688] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.401841] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1102.506539] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task} progress is 98%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.641326] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f1cbfb-38df-427d-a621-a8cea93107f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.649999] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5962a1d8-a612-47bd-8ef5-bf7756387125 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.686520] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790f3b8d-cf0d-474c-8fae-6413b2c92e37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.694981] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb24076-4b20-48c6-afb1-bf81a72a0076 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.709360] env[68638]: DEBUG nova.compute.provider_tree [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.783468] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527c7d8c-9b2d-8b3f-52a8-9244369f6800, 'name': SearchDatastore_Task, 'duration_secs': 0.009178} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.784298] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36c893dd-ad00-4602-9a7e-ddba62c05b81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.790788] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1102.790788] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5208d3e7-22ba-20c4-239f-a3ab6c767a1b" [ 1102.790788] env[68638]: _type = "Task" [ 1102.790788] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.798786] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5208d3e7-22ba-20c4-239f-a3ab6c767a1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.811447] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834347, 'name': Rename_Task, 'duration_secs': 0.207364} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.811767] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1102.812024] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2edd0ead-346c-439c-9a3e-00eea52c2758 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.818498] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1102.818498] env[68638]: value = "task-2834348" [ 1102.818498] env[68638]: _type = "Task" [ 1102.818498] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.826531] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.935059] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1103.003302] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834341, 'name': RelocateVM_Task, 'duration_secs': 3.514107} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.003648] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1103.003882] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570016', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'name': 'volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0903192-4fa7-437a-9023-33e8e65124e3', 'attached_at': '', 'detached_at': '', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'serial': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1103.007139] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca73c13-c1c7-484b-994d-d1e4f79b5826 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.023850] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf016d6-06b4-419b-a8ab-7741beff7815 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.047260] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38/volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.047520] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c14f2ca9-0575-4975-a9bc-c66496e52cb5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.070536] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1103.070536] env[68638]: value = "task-2834349" [ 1103.070536] env[68638]: _type = "Task" [ 1103.070536] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.079147] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834349, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.107619] env[68638]: DEBUG nova.network.neutron [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.212582] env[68638]: DEBUG nova.scheduler.client.report [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.303260] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5208d3e7-22ba-20c4-239f-a3ab6c767a1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.303524] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.304305] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] cb8611f1-d987-43f9-bb4e-4b404c952510/cb8611f1-d987-43f9-bb4e-4b404c952510.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1103.304305] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba16d730-6f55-4122-8c31-5dc3a1623530 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.314149] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1103.314149] env[68638]: value = "task-2834350" [ 1103.314149] env[68638]: _type = "Task" [ 1103.314149] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.325638] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.331890] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834348, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.582411] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834349, 'name': ReconfigVM_Task, 'duration_secs': 0.370019} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.582735] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38/volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.587986] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91796e18-5c45-400a-b789-ba2fdef24021 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.604780] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1103.604780] env[68638]: value = "task-2834351" [ 1103.604780] env[68638]: _type = "Task" [ 1103.604780] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.610065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.610268] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Instance network_info: |[{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1103.610651] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 
ba07529b-e6d0-4c22-b938-c4908a7eafd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:da:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22c8d069-e6d1-4644-89d8-516903e4ef3d', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1103.618433] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.623068] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1103.623068] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834351, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.623068] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea98448e-909d-43b3-bcc4-498e0e92632b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.646953] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1103.646953] env[68638]: value = "task-2834352" [ 1103.646953] env[68638]: _type = "Task" [ 1103.646953] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.658964] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834352, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.718268] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.718961] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1103.721889] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.803s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.805027] env[68638]: DEBUG nova.compute.manager [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Received event network-changed-22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1103.805196] env[68638]: DEBUG nova.compute.manager [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Refreshing instance network info cache due to event network-changed-22c8d069-e6d1-4644-89d8-516903e4ef3d. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1103.805438] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] Acquiring lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.805594] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] Acquired lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.805772] env[68638]: DEBUG nova.network.neutron [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Refreshing network info cache for port 22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1103.827948] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834350, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.834924] env[68638]: DEBUG oslo_vmware.api [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834348, 'name': PowerOnVM_Task, 'duration_secs': 0.550461} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.835207] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.835421] env[68638]: INFO nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Took 9.53 seconds to spawn the instance on the hypervisor. [ 1103.835593] env[68638]: DEBUG nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.836456] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0549bb-1918-4865-b798-04b79aa09701 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.115662] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834351, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.159192] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834352, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.226551] env[68638]: DEBUG nova.compute.utils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.238637] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.238637] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.302415] env[68638]: DEBUG nova.policy [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.325933] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524588} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.326316] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] cb8611f1-d987-43f9-bb4e-4b404c952510/cb8611f1-d987-43f9-bb4e-4b404c952510.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1104.326316] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1104.326697] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0548890-1610-4174-808e-7ccd6fdbd007 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.334881] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1104.334881] env[68638]: value = "task-2834353" [ 1104.334881] env[68638]: _type = "Task" [ 1104.334881] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.352490] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.357509] env[68638]: INFO nova.compute.manager [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Took 25.68 seconds to build instance. [ 1104.515612] env[68638]: DEBUG nova.network.neutron [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updated VIF entry in instance network info cache for port 22c8d069-e6d1-4644-89d8-516903e4ef3d. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1104.515980] env[68638]: DEBUG nova.network.neutron [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.574026] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Successfully created port: 252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1104.616203] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834351, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.656587] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834352, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.738911] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1104.768405] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.771431] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.771657] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.771784] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7d99d946-f2df-4d31-911f-ac479849b901 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0249ffb9-82ed-44db-bb20-e619eaa176dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance cc2e9758-45ee-4e94-ad74-ba7d6c85f06d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ace44b04-6dcf-4845-af4e-b28ddeebe60e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c66805eb-fd97-4fe3-984d-8759f227d7fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a8bd64fb-8a07-4edf-a1fb-c2984e4212ec is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a576ba6f-5e3b-4408-b95d-2084a072ec12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e0903192-4fa7-437a-9023-33e8e65124e3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance cb8611f1-d987-43f9-bb4e-4b404c952510 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ba07529b-e6d0-4c22-b938-c4908a7eafd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.775116] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance f9bd4416-b2c3-4bdd-9066-08935d304765 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1104.848470] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.859654] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7dbf147c-413e-4e5b-a052-c751834e7327 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.196s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.019348] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd4f087-83de-4a80-8761-355bc32de186 req-5d2465bc-5c75-4072-ad25-d357ec5117b3 service nova] Releasing lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.115428] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834351, 'name': ReconfigVM_Task, 'duration_secs': 1.28759} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.115751] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570016', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'name': 'volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e0903192-4fa7-437a-9023-33e8e65124e3', 'attached_at': '', 'detached_at': '', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'serial': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1105.116999] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8e6673a-c173-4055-8a84-e070bf34e84e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.124315] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1105.124315] env[68638]: value = "task-2834354" [ 1105.124315] env[68638]: _type = "Task" [ 1105.124315] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.133724] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834354, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.160611] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834352, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.277936] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 4a0c0188-69bb-441e-a930-ab20be5b2319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.278252] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1105.278403] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1105.346761] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.717874} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.347010] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1105.347771] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fec244-a2a1-45c9-9629-f246984857bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.371412] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] cb8611f1-d987-43f9-bb4e-4b404c952510/cb8611f1-d987-43f9-bb4e-4b404c952510.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1105.375027] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e2fdeee-8478-4dce-8540-4f562dd42088 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.399570] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1105.399570] env[68638]: value = "task-2834355" [ 1105.399570] env[68638]: _type = "Task" [ 1105.399570] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.410538] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834355, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.433836] env[68638]: DEBUG nova.compute.manager [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Received event network-changed-fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1105.434766] env[68638]: DEBUG nova.compute.manager [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Refreshing instance network info cache due to event network-changed-fe93833c-d268-4ad4-8246-17c09472e5db. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1105.434766] env[68638]: DEBUG oslo_concurrency.lockutils [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] Acquiring lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.434766] env[68638]: DEBUG oslo_concurrency.lockutils [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] Acquired lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.435176] env[68638]: DEBUG nova.network.neutron [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Refreshing network info cache for port fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.569066] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f7dbe-1bec-4efa-a92c-4d8e01af9fe2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.578498] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152e76e1-b1c3-4c26-9aa4-0d246ba73f3c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.608285] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3ec5b5-9edc-4634-8dd6-de5facc9fc9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.616283] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c76929-a4d3-4009-b909-8c992d917af1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.632393] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.643201] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834354, 'name': Rename_Task, 'duration_secs': 0.154842} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.644167] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.644427] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed17c572-f993-4d62-b028-33ceb0c2e98d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.653557] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1105.653557] env[68638]: value = "task-2834356" [ 1105.653557] env[68638]: _type = "Task" [ 1105.653557] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.660078] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834352, 'name': CreateVM_Task, 'duration_secs': 1.79497} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.660555] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.661238] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.661404] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.661733] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1105.662226] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e498291-2ed4-407c-8c81-d8db21f383ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.666495] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834356, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.670288] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1105.670288] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b42852-129f-3173-03cf-ebe12d1f92a7" [ 1105.670288] env[68638]: _type = "Task" [ 1105.670288] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.679146] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b42852-129f-3173-03cf-ebe12d1f92a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.755275] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1105.783040] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1105.783309] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1105.783474] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1105.783658] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1105.783805] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 
tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1105.783949] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1105.784172] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1105.784334] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1105.784506] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1105.784672] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1105.784861] env[68638]: DEBUG nova.virt.hardware [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1105.785753] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30a7477-396c-418c-86ec-4a2fe52b95a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.794419] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca1d3ed-6370-48f2-bb2a-289a47d48566 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.910814] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834355, 'name': ReconfigVM_Task, 'duration_secs': 0.334138} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.911193] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Reconfigured VM instance instance-0000006c to attach disk [datastore1] cb8611f1-d987-43f9-bb4e-4b404c952510/cb8611f1-d987-43f9-bb4e-4b404c952510.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.911975] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0767f8d-265a-46bc-9fa5-738c9fec3072 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.920679] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1105.920679] env[68638]: value = "task-2834357" [ 1105.920679] env[68638]: _type = "Task" [ 1105.920679] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.931402] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834357, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.135449] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.166125] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.182249] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b42852-129f-3173-03cf-ebe12d1f92a7, 'name': SearchDatastore_Task, 'duration_secs': 0.010926} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.183161] env[68638]: DEBUG nova.network.neutron [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updated VIF entry in instance network info cache for port fe93833c-d268-4ad4-8246-17c09472e5db. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.183532] env[68638]: DEBUG nova.network.neutron [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating instance_info_cache with network_info: [{"id": "fe93833c-d268-4ad4-8246-17c09472e5db", "address": "fa:16:3e:aa:eb:f2", "network": {"id": "c31bf1cd-7568-43c6-9d99-a1e4d63a62a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1277511990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa16293a678b4a35ac0837f6ce904e48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe93833c-d2", "ovs_interfaceid": "fe93833c-d268-4ad4-8246-17c09472e5db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.184782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.185015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.185264] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.185412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired 
lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.185590] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.186105] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecdc0e4b-ef4b-449f-bbad-af3a6c27516b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.196709] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.196912] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.198286] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e8b25c-10d9-4bdf-94bd-04469083b187 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.204805] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1106.204805] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52559f89-825b-591b-2aad-f6efc831f5d5" [ 1106.204805] env[68638]: _type = "Task" [ 1106.204805] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.213878] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52559f89-825b-591b-2aad-f6efc831f5d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.291600] env[68638]: DEBUG nova.compute.manager [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Received event network-vif-plugged-252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1106.291848] env[68638]: DEBUG oslo_concurrency.lockutils [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] Acquiring lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.292182] env[68638]: DEBUG oslo_concurrency.lockutils [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.292430] env[68638]: DEBUG oslo_concurrency.lockutils [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.292677] env[68638]: DEBUG nova.compute.manager [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] No waiting events found dispatching network-vif-plugged-252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1106.292863] env[68638]: WARNING nova.compute.manager [req-fbe02e88-23c0-4001-88b4-7215b6d487ef req-39572769-a8d2-4bda-89d0-f35588ac136a service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Received unexpected event network-vif-plugged-252d498a-0dc8-42d6-9e73-c86004373452 for instance with vm_state building and task_state spawning. [ 1106.374065] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Successfully updated port: 252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1106.431207] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834357, 'name': Rename_Task, 'duration_secs': 0.169085} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.431493] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.431747] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2006ae47-be8c-4008-9ac7-a1edeb8effab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.438797] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1106.438797] env[68638]: value = "task-2834358" [ 1106.438797] env[68638]: _type = "Task" [ 1106.438797] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.641155] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1106.641329] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.920s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.641674] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.467s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.642336] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.644188] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.455s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.645804] env[68638]: INFO nova.compute.claims [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.665542] env[68638]: DEBUG oslo_vmware.api [None req-73801060-09cf-4141-9890-a4e60e74d465 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834356, 'name': PowerOnVM_Task, 'duration_secs': 0.623923} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.665841] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.666105] env[68638]: INFO nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1106.666436] env[68638]: DEBUG nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.667213] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd65d7d-7ef3-4d0c-b4e0-60eb02f22234 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.670623] env[68638]: INFO nova.scheduler.client.report [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance a8bd64fb-8a07-4edf-a1fb-c2984e4212ec [ 1106.687049] env[68638]: DEBUG oslo_concurrency.lockutils [req-f8e0b673-7b6f-4296-a0b7-e19e2065abd3 req-2556ecb2-dc43-4ea9-ac1c-3f9d67b18006 service nova] Releasing lock "refresh_cache-0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.716518] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52559f89-825b-591b-2aad-f6efc831f5d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010777} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.718337] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58648d41-afca-457b-b482-3228444a9b0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.724834] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1106.724834] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52652a01-3547-c6a5-f18c-a13c93139245" [ 1106.724834] env[68638]: _type = "Task" [ 1106.724834] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.732716] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52652a01-3547-c6a5-f18c-a13c93139245, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.877924] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.878098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.878273] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1106.949215] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.187019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5fc42ac4-4e3d-496d-afc7-fa9eaa174abf tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "a8bd64fb-8a07-4edf-a1fb-c2984e4212ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.498s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.190085] env[68638]: INFO nova.compute.manager [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Took 27.68 seconds to build instance. [ 1107.237936] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52652a01-3547-c6a5-f18c-a13c93139245, 'name': SearchDatastore_Task, 'duration_secs': 0.010042} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.238296] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.238819] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.239257] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90d95878-6553-4463-8d87-d88343bd8cd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.248585] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1107.248585] env[68638]: value = "task-2834359" [ 1107.248585] env[68638]: _type = "Task" [ 1107.248585] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.258527] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.410897] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1107.450825] env[68638]: DEBUG oslo_vmware.api [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834358, 'name': PowerOnVM_Task, 'duration_secs': 0.918497} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.451163] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.451380] env[68638]: INFO nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Took 8.39 seconds to spawn the instance on the hypervisor. [ 1107.451603] env[68638]: DEBUG nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.452445] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e446350d-41aa-40c2-a437-f21ddb464061 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.645955] env[68638]: DEBUG nova.network.neutron [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Updating instance_info_cache with network_info: [{"id": "252d498a-0dc8-42d6-9e73-c86004373452", "address": "fa:16:3e:79:49:c1", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap252d498a-0d", "ovs_interfaceid": "252d498a-0dc8-42d6-9e73-c86004373452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.692434] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73801060-09cf-4141-9890-a4e60e74d465 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.187s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.763984] env[68638]: DEBUG oslo_vmware.api [None 
req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834359, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.915018] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef5b6ee-5b7d-4e98-891e-d54e2eb3b8a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.924621] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4398f06-19ef-4eb7-9a7e-f41fac65aea6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.956025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c4728f-3176-46ed-8e8f-44e8267f0a0b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.968951] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d980ea-977f-4467-895e-60cc432961a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.975311] env[68638]: INFO nova.compute.manager [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Took 27.57 seconds to build instance. [ 1107.985596] env[68638]: DEBUG nova.compute.provider_tree [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.149593] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.150263] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Instance network_info: |[{"id": "252d498a-0dc8-42d6-9e73-c86004373452", "address": "fa:16:3e:79:49:c1", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap252d498a-0d", "ovs_interfaceid": "252d498a-0dc8-42d6-9e73-c86004373452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1108.150412] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:49:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '252d498a-0dc8-42d6-9e73-c86004373452', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.157865] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.158160] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.158409] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17c602b9-19b3-495d-8b48-a9ed2a974381 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.177323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.177554] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.184146] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.184146] env[68638]: value = "task-2834360" [ 1108.184146] env[68638]: _type = "Task" [ 1108.184146] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.195378] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834360, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.266687] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.715545} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.266991] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1108.267508] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1108.268428] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f80825a-a33d-419e-9133-96c8bb0245df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.277077] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1108.277077] env[68638]: value = "task-2834361" [ 1108.277077] env[68638]: _type = "Task" [ 1108.277077] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.287790] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834361, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.327313] env[68638]: DEBUG nova.compute.manager [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Received event network-changed-252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.327313] env[68638]: DEBUG nova.compute.manager [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Refreshing instance network info cache due to event network-changed-252d498a-0dc8-42d6-9e73-c86004373452. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1108.329552] env[68638]: DEBUG oslo_concurrency.lockutils [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] Acquiring lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.329552] env[68638]: DEBUG oslo_concurrency.lockutils [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] Acquired lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.329552] env[68638]: DEBUG nova.network.neutron [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Refreshing network info cache for port 252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.408941] env[68638]: DEBUG nova.compute.manager [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.409341] env[68638]: DEBUG nova.compute.manager [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing instance network info cache due to event network-changed-83c2852d-0228-4c4e-b754-0dc81d6b8a11. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1108.409495] env[68638]: DEBUG oslo_concurrency.lockutils [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] Acquiring lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.409560] env[68638]: DEBUG oslo_concurrency.lockutils [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] Acquired lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.409682] env[68638]: DEBUG nova.network.neutron [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Refreshing network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.478079] env[68638]: DEBUG oslo_concurrency.lockutils [None req-747e02ea-4dfd-4843-bf03-00e7888b83fb tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.079s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.488726] env[68638]: DEBUG nova.scheduler.client.report [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 
tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.681072] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1108.695052] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834360, 'name': CreateVM_Task, 'duration_secs': 0.397286} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.695191] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1108.695929] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.696168] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.696566] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1108.697192] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380fecc0-00b8-4c08-8c1f-68432e3b5d07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.704031] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1108.704031] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae9235-cd23-b8b6-b4ce-4224d83c08cf" [ 1108.704031] env[68638]: _type = "Task" [ 1108.704031] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.715571] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ae9235-cd23-b8b6-b4ce-4224d83c08cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.786443] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089276} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.786810] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.787655] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c47f9a-e2a0-4166-b163-9f0f09ed34ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.812360] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1108.812737] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9bc385a-94d0-429f-b944-22a91ed30a70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.834882] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1108.834882] env[68638]: value = "task-2834362" [ 1108.834882] env[68638]: _type = "Task" [ 1108.834882] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.845077] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834362, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.996998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.997603] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1109.092935] env[68638]: DEBUG nova.network.neutron [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Updated VIF entry in instance network info cache for port 252d498a-0dc8-42d6-9e73-c86004373452. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.093377] env[68638]: DEBUG nova.network.neutron [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Updating instance_info_cache with network_info: [{"id": "252d498a-0dc8-42d6-9e73-c86004373452", "address": "fa:16:3e:79:49:c1", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap252d498a-0d", "ovs_interfaceid": "252d498a-0dc8-42d6-9e73-c86004373452", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.158923] env[68638]: DEBUG nova.network.neutron [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updated VIF entry in instance network info cache for port 83c2852d-0228-4c4e-b754-0dc81d6b8a11. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.159456] env[68638]: DEBUG nova.network.neutron [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updating instance_info_cache with network_info: [{"id": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "address": "fa:16:3e:27:30:e5", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83c2852d-02", "ovs_interfaceid": "83c2852d-0228-4c4e-b754-0dc81d6b8a11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.200621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.200907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.202844] env[68638]: INFO nova.compute.claims [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.215428] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ae9235-cd23-b8b6-b4ce-4224d83c08cf, 'name': SearchDatastore_Task, 'duration_secs': 0.012981} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.215699] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.215928] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.216185] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.216334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.216541] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.216806] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8cacf38-1721-47da-9bf6-e3c617577cf2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.226996] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.227205] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1109.227937] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-362d97a8-27fd-4aee-96e7-5a5985b34f5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.234236] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1109.234236] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a987a4-1ce0-d9db-b6a2-feb2fd1e1b63" [ 1109.234236] env[68638]: _type = "Task" [ 1109.234236] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.243291] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a987a4-1ce0-d9db-b6a2-feb2fd1e1b63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.345403] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834362, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.412534] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "cb8611f1-d987-43f9-bb4e-4b404c952510" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.412782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.413580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.413580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.413580] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.416219] env[68638]: INFO nova.compute.manager [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Terminating instance [ 1109.503243] env[68638]: DEBUG nova.compute.utils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.504912] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.509031] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.558176] env[68638]: DEBUG nova.policy [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '065b6acad70941b0b43b568d1b2f72b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8938cbcafe93492e8f53613d992790bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.600376] env[68638]: DEBUG oslo_concurrency.lockutils [req-b69404b8-9b53-4dfd-9822-977439e4a7b9 req-18dca5f7-e3d8-4809-a5cb-cda6591b55c4 service nova] Releasing lock "refresh_cache-f9bd4416-b2c3-4bdd-9066-08935d304765" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.661848] env[68638]: DEBUG oslo_concurrency.lockutils [req-2870904a-7d20-44a2-aeb7-1be7a6f01092 req-473cc053-1515-4579-95ca-6b0fff5757c1 service nova] Releasing lock "refresh_cache-423af2cc-4dea-445f-a01c-6d4d57c3f0de" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.747209] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': 
session[5267461d-1849-2a3b-78fe-5543790e1404]52a987a4-1ce0-d9db-b6a2-feb2fd1e1b63, 'name': SearchDatastore_Task, 'duration_secs': 0.013858} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.748095] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59e1e09f-d213-4439-befc-0270022b2077 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.754326] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1109.754326] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523739b2-d41a-3713-7475-1c19c7e31040" [ 1109.754326] env[68638]: _type = "Task" [ 1109.754326] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.762894] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523739b2-d41a-3713-7475-1c19c7e31040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.842492] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Successfully created port: f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.848202] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834362, 'name': ReconfigVM_Task, 'duration_secs': 0.576509} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.848471] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfigured VM instance instance-0000006d to attach disk [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.849410] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-525bd978-b613-4e91-a859-cb7f85b9e410 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.857094] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1109.857094] env[68638]: value = "task-2834363" [ 1109.857094] env[68638]: _type = "Task" [ 1109.857094] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.867033] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834363, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.922046] env[68638]: DEBUG nova.compute.manager [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.922046] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.922370] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4111858d-11ef-47af-b906-26baa3ddc290 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.931898] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.932171] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d82fc8fc-aecc-4851-8a6f-62e051245071 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.939196] env[68638]: DEBUG oslo_vmware.api [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1109.939196] env[68638]: value = "task-2834364" [ 1109.939196] env[68638]: _type = "Task" [ 1109.939196] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.948673] env[68638]: DEBUG oslo_vmware.api [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834364, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.009436] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1110.105154] env[68638]: DEBUG nova.compute.manager [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1110.266896] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523739b2-d41a-3713-7475-1c19c7e31040, 'name': SearchDatastore_Task, 'duration_secs': 0.011311} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.269808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.270126] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f9bd4416-b2c3-4bdd-9066-08935d304765/f9bd4416-b2c3-4bdd-9066-08935d304765.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.270667] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81471671-fc2b-4c10-96f3-fc19ce407155 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.285210] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1110.285210] env[68638]: value = "task-2834365" [ 1110.285210] env[68638]: _type = "Task" [ 1110.285210] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.294361] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834365, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.367492] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834363, 'name': Rename_Task, 'duration_secs': 0.173041} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.367871] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.368080] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc1b9aea-55c9-44ec-8c21-f84eae6301d0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.378113] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1110.378113] env[68638]: value = "task-2834366" [ 1110.378113] env[68638]: _type = "Task" [ 1110.378113] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.387888] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834366, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.437910] env[68638]: DEBUG nova.compute.manager [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Received event network-changed-ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1110.438122] env[68638]: DEBUG nova.compute.manager [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Refreshing instance network info cache due to event network-changed-ef048785-d375-47e3-9f3c-2f26fd1bb175. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1110.438458] env[68638]: DEBUG oslo_concurrency.lockutils [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.438458] env[68638]: DEBUG oslo_concurrency.lockutils [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.438625] env[68638]: DEBUG nova.network.neutron [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Refreshing network info cache for port ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1110.452898] env[68638]: DEBUG oslo_vmware.api [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834364, 'name': PowerOffVM_Task, 'duration_secs': 0.425535} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.453204] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.453354] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1110.453618] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccd33f89-58af-40a7-bcda-5a5d3492976f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.490126] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffa31ba-4d33-46f6-a5f4-4a5bdc04e1b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.502647] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5554c81-4e3d-4f83-8f5e-180a61308174 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.545361] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2540d2-7baa-4ed4-b685-3de826c04765 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.548478] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 
tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.548707] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.548917] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Deleting the datastore file [datastore1] cb8611f1-d987-43f9-bb4e-4b404c952510 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.549213] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc3d8bf4-164c-4d99-bc84-98df13aae03a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.559253] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c2cae6-708f-4a28-9ee3-42aba99e1d8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.563932] env[68638]: DEBUG oslo_vmware.api [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for the task: (returnval){ [ 1110.563932] env[68638]: value = "task-2834368" [ 1110.563932] env[68638]: _type = "Task" [ 1110.563932] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.578837] env[68638]: DEBUG nova.compute.provider_tree [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.623829] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.795959] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834365, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.889452] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834366, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.047535] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1111.078756] env[68638]: DEBUG oslo_vmware.api [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Task: {'id': task-2834368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386002} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.082025] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.082448] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.085027] env[68638]: DEBUG nova.virt.hardware [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.085027] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1111.085027] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1111.085027] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1111.085027] env[68638]: INFO nova.compute.manager [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1111.085027] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1111.086540] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c13974-01ae-4adc-9422-df0ecbcd5a03 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.089997] env[68638]: DEBUG nova.scheduler.client.report [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.093267] env[68638]: DEBUG nova.compute.manager [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1111.093472] env[68638]: DEBUG nova.network.neutron [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1111.104068] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44709ad0-6ce3-4a43-be89-9c206bef89ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.179743] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.179994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.226916] env[68638]: DEBUG nova.network.neutron [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updated VIF entry in instance network info cache for port ef048785-d375-47e3-9f3c-2f26fd1bb175. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1111.227351] env[68638]: DEBUG nova.network.neutron [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.296975] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565542} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.297641] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] f9bd4416-b2c3-4bdd-9066-08935d304765/f9bd4416-b2c3-4bdd-9066-08935d304765.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.297897] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.298237] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcc99270-9fce-406c-844b-80c3e5750321 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.306478] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1111.306478] env[68638]: value = "task-2834369" [ 1111.306478] env[68638]: _type = "Task" [ 1111.306478] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.317960] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.390566] env[68638]: DEBUG oslo_vmware.api [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834366, 'name': PowerOnVM_Task, 'duration_secs': 0.625924} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.390918] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.391193] env[68638]: INFO nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Took 9.94 seconds to spawn the instance on the hypervisor. 
[ 1111.391544] env[68638]: DEBUG nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.392328] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13982602-53e5-4f6e-8a97-934a77da4d62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.594271] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Successfully updated port: f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.595974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.596501] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1111.599574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.976s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.684604] env[68638]: DEBUG nova.compute.utils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.733198] env[68638]: DEBUG oslo_concurrency.lockutils [req-1aa93c36-13bc-4d78-bb6d-91bb37eeb0e1 req-447eff81-4eeb-45e6-a259-8ee59606cc0d service nova] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.817287] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084344} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.817555] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1111.818422] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8d0c64-d11c-46cc-9086-7327627fcaf0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.843666] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] f9bd4416-b2c3-4bdd-9066-08935d304765/f9bd4416-b2c3-4bdd-9066-08935d304765.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1111.843953] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22237f5d-72f7-48d1-9082-d733c3034d59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.864772] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1111.864772] env[68638]: value = "task-2834370" [ 1111.864772] env[68638]: _type = "Task" [ 1111.864772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.873083] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.914316] env[68638]: INFO nova.compute.manager [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Took 31.05 seconds to build instance. 
[ 1111.977930] env[68638]: DEBUG nova.network.neutron [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.100796] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.100958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.101129] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.103571] env[68638]: DEBUG nova.compute.utils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.106219] env[68638]: INFO nova.compute.claims [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.112140] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1112.112646] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1112.185922] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.188352] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.201104] env[68638]: DEBUG nova.policy [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847f535ec96f4ef0b73ae277199b4533', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98a35cb6ae4d4c8688fb89d7da0b2dd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1112.375812] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834370, 'name': ReconfigVM_Task, 'duration_secs': 0.297879} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.376138] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Reconfigured VM instance instance-0000006e to attach disk [datastore1] f9bd4416-b2c3-4bdd-9066-08935d304765/f9bd4416-b2c3-4bdd-9066-08935d304765.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1112.376826] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3a7ce36-ee26-4791-919b-3c6ad4680bbc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.384632] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1112.384632] env[68638]: value = "task-2834371" [ 1112.384632] env[68638]: _type = "Task" [ 1112.384632] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.392769] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834371, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.416709] env[68638]: DEBUG oslo_concurrency.lockutils [None req-765af1dc-0d6c-4edf-a0de-87b9aec2582d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.561s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.480518] env[68638]: INFO nova.compute.manager [-] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Took 1.39 seconds to deallocate network for instance. [ 1112.494930] env[68638]: DEBUG nova.network.neutron [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Updating instance_info_cache with network_info: [{"id": "f587042f-8bad-458a-9cd7-16e741d597ca", "address": "fa:16:3e:d6:4b:7c", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf587042f-8b", "ovs_interfaceid": "f587042f-8bad-458a-9cd7-16e741d597ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.577252] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.577520] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.610821] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.617500] env[68638]: INFO nova.compute.resource_tracker [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating resource usage from migration 810edb53-f308-46ed-9eb8-5991ced4eb1c [ 1112.706023] env[68638]: DEBUG nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Received event network-vif-deleted-ddf9710f-5767-4215-876a-d304d09d0b36 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1112.706241] env[68638]: DEBUG nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Received event network-vif-plugged-f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1112.706443] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Acquiring lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.706674] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.707141] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.707141] env[68638]: DEBUG nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] No waiting events found dispatching network-vif-plugged-f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1112.711454] env[68638]: WARNING nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Received unexpected event network-vif-plugged-f587042f-8bad-458a-9cd7-16e741d597ca for instance with vm_state building and task_state spawning. 
[ 1112.711454] env[68638]: DEBUG nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Received event network-changed-f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1112.711454] env[68638]: DEBUG nova.compute.manager [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Refreshing instance network info cache due to event network-changed-f587042f-8bad-458a-9cd7-16e741d597ca. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1112.711454] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Acquiring lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.725635] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Successfully created port: efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1112.890612] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1792dc-7d9c-4bca-b184-6abf75e2711e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.896352] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834371, 'name': Rename_Task, 'duration_secs': 0.190224} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.897017] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1112.897263] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a83af85e-6231-4255-a327-6155906cfbdf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.901698] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd13529d-819b-41b1-912a-abc750385f90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.906853] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1112.906853] env[68638]: value = "task-2834372" [ 1112.906853] env[68638]: _type = "Task" [ 1112.906853] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.916804] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.943972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bb5669-bbbe-45b6-a0de-d037332ac5cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.957697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c74645-3766-4ebe-82ba-15b121371109 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.974605] env[68638]: DEBUG nova.compute.provider_tree [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.988410] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.999631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.999988] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Instance network_info: |[{"id": "f587042f-8bad-458a-9cd7-16e741d597ca", "address": "fa:16:3e:d6:4b:7c", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf587042f-8b", "ovs_interfaceid": "f587042f-8bad-458a-9cd7-16e741d597ca", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.000300] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Acquired lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.000498] env[68638]: DEBUG nova.network.neutron [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Refreshing network info cache for port f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.005022] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:4b:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f587042f-8bad-458a-9cd7-16e741d597ca', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.010236] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.011057] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.011298] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58ac2b9a-3be2-48ba-ad4f-6215f39d9db1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.033908] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.033908] env[68638]: value = "task-2834373" [ 1113.033908] env[68638]: _type = "Task" [ 1113.033908] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.045106] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834373, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.081099] env[68638]: INFO nova.compute.manager [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Detaching volume e62ecc96-280f-49b1-b4a1-915281c6d7c5 [ 1113.121400] env[68638]: INFO nova.virt.block_device [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Attempting to driver detach volume e62ecc96-280f-49b1-b4a1-915281c6d7c5 from mountpoint /dev/sdb [ 1113.121702] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1113.121968] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570005', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'name': 'volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '0249ffb9-82ed-44db-bb20-e619eaa176dd', 'attached_at': '', 'detached_at': '', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'serial': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1113.122911] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d731fc-0230-42ab-8cd0-e27143ac09bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.146450] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250cd988-a478-40d1-b5bb-20ae0d13a240 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.154965] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7666fa35-c54c-4206-b2dd-8d3c00fbc6ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.178267] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ea4c54-41fc-494e-8abb-29148f110a1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.196189] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] The volume has not been displaced from its original location: [datastore2] volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5/volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1113.201772] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1113.202140] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afc653bf-4f9b-4697-8b59-233ce7938cd3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.224492] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1113.224492] env[68638]: value = "task-2834374" [ 1113.224492] env[68638]: _type = "Task" [ 1113.224492] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.234322] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.276052] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.276419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.276964] env[68638]: INFO nova.compute.manager [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Attaching volume 66d78f72-80b8-487d-8315-0d99a3f6172d to /dev/sdb [ 1113.319909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e957fd2-fb52-432c-bc22-3c210c63f3bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.330238] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80bb084-f129-4609-9c81-b83485634e33 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.347803] env[68638]: DEBUG nova.virt.block_device [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 
tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating existing volume attachment record: 614ba91a-ce88-4662-b691-27c677b63667 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1113.420231] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834372, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.479041] env[68638]: DEBUG nova.scheduler.client.report [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.547347] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834373, 'name': CreateVM_Task, 'duration_secs': 0.407715} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.547609] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.548412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.548642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.549029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1113.549349] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba6a9e5e-5d6b-4e2b-8635-ac2fdd18cd6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.557617] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 
tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1113.557617] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b6c581-ac3a-8ecb-a8ff-fa11a7a822b7" [ 1113.557617] env[68638]: _type = "Task" [ 1113.557617] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.568149] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b6c581-ac3a-8ecb-a8ff-fa11a7a822b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.628316] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.665628] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.666541] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.666541] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.666699] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.666948] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.667507] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 
tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.667966] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.668476] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.668476] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.668738] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.669066] env[68638]: DEBUG nova.virt.hardware [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.671442] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d9c5c8-c318-4097-9fd8-75cf5cb4b33d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.682152] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec021439-9038-4f13-a2b8-9d85367b7651 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.735546] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834374, 'name': ReconfigVM_Task, 'duration_secs': 0.332454} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.735847] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1113.740952] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eb65038-e615-42c8-8ca9-faa06f4c1171 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.758313] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1113.758313] env[68638]: value = "task-2834378" [ 1113.758313] env[68638]: _type = "Task" [ 1113.758313] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.767699] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.913093] env[68638]: DEBUG nova.network.neutron [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Updated VIF entry in instance network info cache for port f587042f-8bad-458a-9cd7-16e741d597ca. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.913525] env[68638]: DEBUG nova.network.neutron [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Updating instance_info_cache with network_info: [{"id": "f587042f-8bad-458a-9cd7-16e741d597ca", "address": "fa:16:3e:d6:4b:7c", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf587042f-8b", "ovs_interfaceid": "f587042f-8bad-458a-9cd7-16e741d597ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.922092] env[68638]: DEBUG oslo_vmware.api [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834372, 'name': PowerOnVM_Task, 'duration_secs': 0.612464} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.922474] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1113.922647] env[68638]: INFO nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Took 8.17 seconds to spawn the instance on the hypervisor. 
[ 1113.922845] env[68638]: DEBUG nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.924402] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e26c3dd-422a-4d0d-bbbb-8a1771092dc3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.987989] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.388s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.988239] env[68638]: INFO nova.compute.manager [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Migrating [ 1113.997344] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.009s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.997668] env[68638]: DEBUG nova.objects.instance [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lazy-loading 'resources' on Instance uuid cb8611f1-d987-43f9-bb4e-4b404c952510 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.073636] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b6c581-ac3a-8ecb-a8ff-fa11a7a822b7, 'name': SearchDatastore_Task, 'duration_secs': 0.014443} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.076443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.076744] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.077024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.077188] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.077400] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.077969] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3900f9b2-507d-4ee0-9355-09f852113d01 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.092226] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.092226] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.092918] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64ad3681-f9b5-4cb2-8925-7c3c5f75070b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.100061] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1114.100061] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f456a7-4f71-3b74-7dec-8a91cf9a59c3" [ 1114.100061] env[68638]: _type = "Task" [ 1114.100061] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.112075] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f456a7-4f71-3b74-7dec-8a91cf9a59c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.269864] env[68638]: DEBUG oslo_vmware.api [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834378, 'name': ReconfigVM_Task, 'duration_secs': 0.309934} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.270229] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570005', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'name': 'volume-e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '0249ffb9-82ed-44db-bb20-e619eaa176dd', 'attached_at': '', 'detached_at': '', 'volume_id': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5', 'serial': 'e62ecc96-280f-49b1-b4a1-915281c6d7c5'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1114.307780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cb151e-7e17-483d-bf15-c690ff24177c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.318555] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e1e7f7-c204-49e6-98ef-f6dd7637405d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.354982] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.356859] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57803a60-38ed-4d9c-a096-9e4090c7d8c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.365801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6a4e05-49ff-44ea-a51a-829342201437 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.384021] env[68638]: DEBUG nova.compute.provider_tree [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.417144] env[68638]: DEBUG oslo_concurrency.lockutils [req-167f571c-fdea-45e7-aa06-312a11fae412 req-9dbed11e-fee5-490e-b9bb-2b046601c3e9 service nova] Releasing lock "refresh_cache-4a0c0188-69bb-441e-a930-ab20be5b2319" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.450141] env[68638]: INFO nova.compute.manager [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Took 27.45 seconds to build instance. [ 1114.451404] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Successfully updated port: efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.514127] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.514436] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.514646] env[68638]: DEBUG nova.network.neutron [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1114.515870] env[68638]: DEBUG nova.compute.manager [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1114.611180] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: 
{'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f456a7-4f71-3b74-7dec-8a91cf9a59c3, 'name': SearchDatastore_Task, 'duration_secs': 0.012491} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.611903] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72662163-1cc7-4177-ac81-845adeec8e82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.617041] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1114.617041] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e0fc11-1853-5686-cb82-b258988984d3" [ 1114.617041] env[68638]: _type = "Task" [ 1114.617041] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.624919] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e0fc11-1853-5686-cb82-b258988984d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.733748] env[68638]: DEBUG nova.compute.manager [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Received event network-vif-plugged-efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1114.733984] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Acquiring lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.734121] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.734370] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.734686] env[68638]: DEBUG nova.compute.manager [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] No waiting events found dispatching network-vif-plugged-efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1114.734927] env[68638]: WARNING nova.compute.manager 
[req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Received unexpected event network-vif-plugged-efe8e56e-2095-427b-ab7a-31cbdb0521ca for instance with vm_state building and task_state spawning. [ 1114.735110] env[68638]: DEBUG nova.compute.manager [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Received event network-changed-efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1114.735270] env[68638]: DEBUG nova.compute.manager [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Refreshing instance network info cache due to event network-changed-efe8e56e-2095-427b-ab7a-31cbdb0521ca. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1114.735451] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Acquiring lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.735630] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Acquired lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.735734] env[68638]: DEBUG nova.network.neutron [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Refreshing network info cache for port efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1114.814464] env[68638]: DEBUG nova.objects.instance [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'flavor' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1114.886558] env[68638]: DEBUG nova.scheduler.client.report [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.952117] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d01cf2c-2b99-476e-a1d6-27e947219e76 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
29.372s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.953697] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.043639] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.127740] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e0fc11-1853-5686-cb82-b258988984d3, 'name': SearchDatastore_Task, 'duration_secs': 0.047158} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.127966] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.128778] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4a0c0188-69bb-441e-a930-ab20be5b2319/4a0c0188-69bb-441e-a930-ab20be5b2319.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1115.128778] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4aafe3ab-47b7-45cf-b141-65534c4a45a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.138497] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1115.138497] env[68638]: value = "task-2834379" [ 1115.138497] env[68638]: _type = "Task" [ 1115.138497] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.148095] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834379, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.250066] env[68638]: DEBUG nova.network.neutron [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.272403] env[68638]: DEBUG nova.network.neutron [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1115.284874] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "f9bd4416-b2c3-4bdd-9066-08935d304765" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.285102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.285286] env[68638]: DEBUG nova.compute.manager [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.286212] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9378ebf4-4550-4053-a8d9-0a39b2fa8073 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.293805] env[68638]: DEBUG nova.compute.manager [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1115.294378] env[68638]: DEBUG nova.objects.instance [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'flavor' on Instance uuid f9bd4416-b2c3-4bdd-9066-08935d304765 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.386629] env[68638]: DEBUG nova.network.neutron [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.391599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.395s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.395979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.352s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.413489] env[68638]: INFO nova.scheduler.client.report [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Deleted allocations for instance cb8611f1-d987-43f9-bb4e-4b404c952510 [ 1115.652203] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834379, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.753394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.821126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fa858a42-b83e-4f08-b11b-a622ba922303 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.243s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.845060] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.890093] env[68638]: DEBUG oslo_concurrency.lockutils [req-cb60c740-0282-4514-97b0-d41d18a394c0 req-a46df04c-a568-44a0-84f3-5451cdfd469a service nova] Releasing lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.890506] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.890842] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.900742] env[68638]: INFO nova.compute.claims [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.920824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9a783c6c-17ea-4f03-a92f-a3a5332156a5 tempest-ServerMetadataNegativeTestJSON-1998517186 tempest-ServerMetadataNegativeTestJSON-1998517186-project-member] Lock "cb8611f1-d987-43f9-bb4e-4b404c952510" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.508s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.991937] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.992350] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.992455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.993083] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.993398] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.995559] env[68638]: INFO nova.compute.manager [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Terminating instance [ 1116.150788] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834379, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80223} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.151038] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4a0c0188-69bb-441e-a930-ab20be5b2319/4a0c0188-69bb-441e-a930-ab20be5b2319.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1116.151274] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1116.151552] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bf93d2d-a012-43b0-85ef-ea8ed9acd18e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.159349] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1116.159349] env[68638]: value = "task-2834381" [ 1116.159349] env[68638]: _type = "Task" [ 1116.159349] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.168009] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.302112] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.302431] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54d8c9c3-3804-4979-8197-313f516d8939 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.311250] env[68638]: DEBUG oslo_vmware.api [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1116.311250] env[68638]: value = "task-2834382" [ 1116.311250] env[68638]: _type = "Task" [ 1116.311250] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.321278] env[68638]: DEBUG oslo_vmware.api [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834382, 'name': PowerOffVM_Task} progress is 0%. 
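The "Waiting for the task: (returnval){ value = "task-..." }" blocks and the "progress is N%" polling above are produced by oslo.vmware's session API around vSphere tasks such as CopyVirtualDisk_Task and ExtendVirtualDisk_Task. Below is a minimal sketch of that invoke-and-wait pattern; the vCenter endpoint, credentials and datastore paths are placeholders, not values from this run.

```python
from oslo_vmware import api

# Placeholder endpoint and credentials.
session = api.VMwareAPISession(
    'vcenter.example.test', 'svc-user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# Start a vSphere CopyVirtualDisk_Task (source/destination paths are
# placeholders) and block until it finishes or raises.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] image-cache/base.vmdk',
    destName='[datastore1] some-uuid/some-uuid.vmdk')
session.wait_for_task(task)  # emits the "progress is N%" lines while polling
```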
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.340732] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.409756] env[68638]: INFO nova.compute.resource_tracker [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating resource usage from migration a52636b4-6b32-44a1-963d-dab28c234feb [ 1116.426462] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.499926] env[68638]: DEBUG nova.compute.manager [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1116.500196] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1116.502184] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa03540b-6bd4-4160-b295-4ce3945ee729 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.518487] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1116.519355] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-773de446-0836-4658-bf98-8ed0f08e61fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.527711] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1116.527711] env[68638]: value = "task-2834383" [ 1116.527711] env[68638]: _type = "Task" [ 1116.527711] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.538144] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834383, 'name': PowerOffVM_Task} progress is 0%. 
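The "Running periodic task ComputeManager._poll_..." lines above are emitted by oslo.service's periodic task runner, which walks the decorated methods of a manager class at their configured spacing. A minimal sketch with an illustrative manager (not Nova's) follows.

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class ExampleManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    # Runs roughly every 60 seconds when the service loop drives
    # run_periodic_tasks(); a negative spacing disables the task.
    @periodic_task.periodic_task(spacing=60)
    def _poll_example_usage(self, context):
        pass  # gather usage data here

manager = ExampleManager()
manager.run_periodic_tasks(context=None)  # one pass over due tasks
```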
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.667041] env[68638]: DEBUG nova.network.neutron [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Updating instance_info_cache with network_info: [{"id": "efe8e56e-2095-427b-ab7a-31cbdb0521ca", "address": "fa:16:3e:20:cb:e4", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe8e56e-20", "ovs_interfaceid": "efe8e56e-2095-427b-ab7a-31cbdb0521ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.685349] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065794} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.685534] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1116.687521] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc389a93-30b1-4484-86b6-4bf1b65fe4e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.717971] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 4a0c0188-69bb-441e-a930-ab20be5b2319/4a0c0188-69bb-441e-a930-ab20be5b2319.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.719723] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66001759-25f9-4235-bfd2-3b4c9c73bc65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.735286] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b482e0ca-b902-4eac-9fe5-46fb2bc7d959 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.743909] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2e1db8-1dd6-4a85-bbc3-702b8230d5f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.748368] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1116.748368] env[68638]: value = "task-2834384" [ 1116.748368] env[68638]: _type = "Task" [ 1116.748368] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.785409] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb62a36-3590-47ac-9008-7cb607e4a8b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.791806] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834384, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.798341] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72eff21-71f4-443e-a3e7-09e05cf53abf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.814176] env[68638]: DEBUG nova.compute.provider_tree [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1116.824331] env[68638]: DEBUG oslo_vmware.api [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834382, 'name': PowerOffVM_Task, 'duration_secs': 0.175154} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.825145] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.825351] env[68638]: DEBUG nova.compute.manager [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.826147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcdcd6f-e561-4af8-8b5d-512d69c77917 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.041866] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834383, 'name': PowerOffVM_Task, 'duration_secs': 0.474853} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.042356] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1117.042678] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1117.044901] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62cedf49-faaa-43ef-8206-487961a9b8e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.140353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1117.140353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1117.140353] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore2] 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1117.140353] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c678e6f-b1e6-496d-a1b2-783f073418bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.148223] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1117.148223] env[68638]: value = "task-2834386" [ 1117.148223] env[68638]: _type = "Task" [ 1117.148223] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.157488] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834386, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.177059] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "refresh_cache-4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.177059] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance network_info: |[{"id": "efe8e56e-2095-427b-ab7a-31cbdb0521ca", "address": "fa:16:3e:20:cb:e4", "network": {"id": "ad22ed5c-0d03-45c8-8bc4-c4f51dbac4fc", "bridge": "br-int", "label": "tempest-ServersTestJSON-2147381832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98a35cb6ae4d4c8688fb89d7da0b2dd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "46e1fc20-2067-4e1a-9812-702772a2c82c", "external-id": "nsx-vlan-transportzone-210", "segmentation_id": 210, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefe8e56e-20", "ovs_interfaceid": "efe8e56e-2095-427b-ab7a-31cbdb0521ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.177059] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:cb:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '46e1fc20-2067-4e1a-9812-702772a2c82c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efe8e56e-2095-427b-ab7a-31cbdb0521ca', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.183893] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.184192] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.184979] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-444ebf5b-4dd2-4cd8-95c1-f5c3b4edf0b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.205736] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.205736] env[68638]: value = "task-2834387" [ 1117.205736] env[68638]: _type = "Task" [ 1117.205736] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.218084] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834387, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.259408] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834384, 'name': ReconfigVM_Task, 'duration_secs': 0.296255} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.260156] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 4a0c0188-69bb-441e-a930-ab20be5b2319/4a0c0188-69bb-441e-a930-ab20be5b2319.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.260436] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19f39ec9-23e1-43cb-8e68-fe9135adcd28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.267030] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1117.267030] env[68638]: value = "task-2834388" [ 1117.267030] env[68638]: _type = "Task" [ 1117.267030] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.279354] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834388, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.297897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228f0326-8316-4696-945b-d5fac4a2a2c2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.316629] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1117.338702] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b4e05570-e0df-4d1d-8fda-f247ae04201d tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.339681] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.340239] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.340239] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1117.342407] env[68638]: ERROR nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [req-ec56a7bf-371a-43ea-a117-d90e4da83c88] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ec56a7bf-371a-43ea-a117-d90e4da83c88"}]} [ 1117.362368] env[68638]: DEBUG nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1117.379244] env[68638]: DEBUG nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1117.380410] env[68638]: DEBUG nova.compute.provider_tree [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1117.395258] env[68638]: DEBUG nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1117.423444] env[68638]: DEBUG nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1117.660582] env[68638]: DEBUG oslo_vmware.api [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140774} completed successfully. 
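The ERROR above, a 409 with code placement.concurrent_update, is Placement's generation-based concurrency check rejecting an inventory update made against a stale resource provider generation; the subsequent "Refreshing inventories" lines show the client re-reading the provider and retrying. A schematic sketch of that read-modify-write loop follows; the endpoint, token handling and retry policy are simplified placeholders, not Nova's report client.

```python
import requests

PLACEMENT = 'http://placement.example.test'
HEADERS = {'x-auth-token': '<token>',
           'openstack-api-version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, retries=3):
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT, rp_uuid)
    for _ in range(retries):
        # Read the current inventories to learn the provider generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            # Echo back the generation this update is based on.
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            return resp
        # Another writer bumped the generation first: refresh and retry.
    raise RuntimeError('gave up after repeated generation conflicts')
```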
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.663280] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1117.663486] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1117.663667] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1117.664210] env[68638]: INFO nova.compute.manager [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1117.664210] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.664538] env[68638]: DEBUG nova.compute.manager [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1117.664641] env[68638]: DEBUG nova.network.neutron [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1117.713653] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "f9bd4416-b2c3-4bdd-9066-08935d304765" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.714630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.714630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.714840] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.715019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.721809] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cff0be7-7734-4f64-b4a9-6f2a66fee88c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.728718] env[68638]: INFO nova.compute.manager [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Terminating instance [ 1117.731384] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834387, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.739650] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4194c989-a9ef-4d3b-a000-1742a1a3becc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.781520] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160256f9-de00-49fc-8497-65a964d44ada {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.795022] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe19fe-9eea-4768-b406-6f1210a51e65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.798063] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834388, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.811028] env[68638]: DEBUG nova.compute.provider_tree [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.825803] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.826137] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b39b6f4f-4eef-46c8-a99d-9be3da292838 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.834824] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1117.834824] env[68638]: value = "task-2834389" [ 1117.834824] env[68638]: _type = "Task" [ 1117.834824] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.844703] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834389, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.858467] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] There are 51 instances to clean {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1117.858703] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: cb8611f1-d987-43f9-bb4e-4b404c952510] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1117.910925] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1117.911216] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570036', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'name': 'volume-66d78f72-80b8-487d-8315-0d99a3f6172d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ace44b04-6dcf-4845-af4e-b28ddeebe60e', 'attached_at': '', 'detached_at': '', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'serial': '66d78f72-80b8-487d-8315-0d99a3f6172d'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1117.912175] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddc3f0a-cfe3-4df2-8e16-08843c16720c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.933717] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec32109-3bfe-4a37-af78-3e06db99e526 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.961416] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-66d78f72-80b8-487d-8315-0d99a3f6172d/volume-66d78f72-80b8-487d-8315-0d99a3f6172d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.961775] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4813544d-9b6e-4610-b232-7450308e77f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.996074] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1117.996074] env[68638]: value = "task-2834390" [ 
1117.996074] env[68638]: _type = "Task" [ 1117.996074] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.008930] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.190247] env[68638]: DEBUG nova.compute.manager [req-9879fdf2-594f-41ee-97b3-92f0314af7be req-e9d09017-d34f-413b-8997-e3a40612c02b service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Received event network-vif-deleted-d0023f1c-323c-4f1c-a82c-45ad56565341 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1118.190489] env[68638]: INFO nova.compute.manager [req-9879fdf2-594f-41ee-97b3-92f0314af7be req-e9d09017-d34f-413b-8997-e3a40612c02b service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Neutron deleted interface d0023f1c-323c-4f1c-a82c-45ad56565341; detaching it from the instance and deleting it from the info cache [ 1118.190717] env[68638]: DEBUG nova.network.neutron [req-9879fdf2-594f-41ee-97b3-92f0314af7be req-e9d09017-d34f-413b-8997-e3a40612c02b service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.222542] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834387, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.234805] env[68638]: DEBUG nova.compute.manager [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1118.235112] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1118.236080] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca1cdd3-156c-4b15-aaac-c74095188f93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.245909] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1118.245909] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1da5af0f-6d03-4c26-ad8a-80541fb899d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.288647] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834388, 'name': Rename_Task, 'duration_secs': 0.878073} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.288953] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.289224] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6401d8db-c491-4c5c-913a-9a220294d7bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.298998] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1118.298998] env[68638]: value = "task-2834392" [ 1118.298998] env[68638]: _type = "Task" [ 1118.298998] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.309115] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834392, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.314074] env[68638]: DEBUG nova.scheduler.client.report [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.317586] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1118.317838] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1118.318018] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore1] f9bd4416-b2c3-4bdd-9066-08935d304765 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1118.318482] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32a4ba75-b941-4206-b923-af07d03461ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.326845] env[68638]: DEBUG oslo_vmware.api [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1118.326845] env[68638]: value = "task-2834393" [ 1118.326845] env[68638]: _type = "Task" [ 1118.326845] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.336487] env[68638]: DEBUG oslo_vmware.api [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.346579] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834389, 'name': PowerOffVM_Task, 'duration_secs': 0.232016} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.346917] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1118.347085] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.362873] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a8bd64fb-8a07-4edf-a1fb-c2984e4212ec] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1118.511432] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834390, 'name': ReconfigVM_Task, 'duration_secs': 0.399533} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.511991] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-66d78f72-80b8-487d-8315-0d99a3f6172d/volume-66d78f72-80b8-487d-8315-0d99a3f6172d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.517994] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-486d2213-3d25-4cee-b34c-5231366f48b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.541979] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1118.541979] env[68638]: value = "task-2834394" [ 1118.541979] env[68638]: _type = "Task" [ 1118.541979] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.551485] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834394, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.666696] env[68638]: DEBUG nova.network.neutron [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.694092] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6f5957a-244e-426f-936a-f41e2d9c61af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.704945] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4ba9b0-7608-4cd1-9f08-57781c3b861f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.726770] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834387, 'name': CreateVM_Task, 'duration_secs': 1.083459} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.726959] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.727673] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.727841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.728181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.728442] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa8d87a-260c-42b1-9e1a-b2884c45cf5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.734980] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1118.734980] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f11f1f-554e-b598-c64a-ba23c7127833" [ 1118.734980] env[68638]: _type = "Task" [ 1118.734980] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.743884] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f11f1f-554e-b598-c64a-ba23c7127833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.758517] env[68638]: DEBUG nova.compute.manager [req-9879fdf2-594f-41ee-97b3-92f0314af7be req-e9d09017-d34f-413b-8997-e3a40612c02b service nova] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Detach interface failed, port_id=d0023f1c-323c-4f1c-a82c-45ad56565341, reason: Instance 0249ffb9-82ed-44db-bb20-e619eaa176dd could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1118.810980] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834392, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.820099] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.425s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.820316] env[68638]: INFO nova.compute.manager [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Migrating [ 1118.846981] env[68638]: DEBUG oslo_vmware.api [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216631} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.846981] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.847175] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.847361] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.847652] env[68638]: INFO nova.compute.manager [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1118.847911] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.848117] env[68638]: DEBUG nova.compute.manager [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1118.848217] env[68638]: DEBUG nova.network.neutron [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1118.853031] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1118.853226] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1118.853386] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1118.853582] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1118.853710] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1118.853859] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1118.854074] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1118.854238] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1118.854522] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1118.854583] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1118.854720] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1118.860956] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfbd932b-5be8-4909-a7c5-680431a6c6e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.872090] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 6213446a-f6a4-439b-a1ed-5b8c2234d6ac] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1118.881042] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1118.881042] env[68638]: value = "task-2834395" [ 1118.881042] env[68638]: _type = "Task" [ 1118.881042] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.892076] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.055878] env[68638]: DEBUG oslo_vmware.api [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834394, 'name': ReconfigVM_Task, 'duration_secs': 0.149652} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.057056] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570036', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'name': 'volume-66d78f72-80b8-487d-8315-0d99a3f6172d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ace44b04-6dcf-4845-af4e-b28ddeebe60e', 'attached_at': '', 'detached_at': '', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'serial': '66d78f72-80b8-487d-8315-0d99a3f6172d'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1119.169720] env[68638]: INFO nova.compute.manager [-] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Took 1.50 seconds to deallocate network for instance. [ 1119.246648] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f11f1f-554e-b598-c64a-ba23c7127833, 'name': SearchDatastore_Task, 'duration_secs': 0.029943} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.247056] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.247361] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.247677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.247903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.248222] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1119.248539] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d854ec44-a8d7-4893-abdf-d7c4764823a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.260505] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1119.260697] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1119.261548] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f726d62b-3f93-4dbc-bc97-5438cbc35c7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.267763] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1119.267763] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b2d1a0-e435-adc1-284d-be21429e1509" [ 1119.267763] env[68638]: _type = "Task" [ 1119.267763] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.276161] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b2d1a0-e435-adc1-284d-be21429e1509, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.311278] env[68638]: DEBUG oslo_vmware.api [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834392, 'name': PowerOnVM_Task, 'duration_secs': 0.552131} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.311574] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.311798] env[68638]: INFO nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Took 8.26 seconds to spawn the instance on the hypervisor. 
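The recurring "Invoking <method> with opID=..." / "Waiting for the task: (returnval){...}" / "Task: {...} progress is N%" / "completed successfully" records above are emitted by oslo.vmware's session layer while Nova's vmwareapi driver drives vCenter tasks (see the oslo_vmware/service.py and api.py paths in the log). A minimal sketch of that invoke-and-wait pattern follows; the host, credentials and MoRef id are placeholders rather than values from this log, and the keyword arguments are assumed from typical oslo.vmware usage.

# Sketch of the invoke/wait pattern behind the "Invoking ... PowerOnVM_Task" and
# "Task: {...} progress is N%" records above. Connection details and the MoRef id
# are placeholders, not taken from this log.
from oslo_vmware import api, vim_util

def power_on_vm(host, user, password, vm_moid):
    # "_create_session" in the log corresponds to constructing this session object.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    try:
        # Build a VirtualMachine managed-object reference from its MoRef id.
        vm_ref = vim_util.get_moref(vm_moid, 'VirtualMachine')
        # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." in the log.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task server-side and emits the
        # "Task: {...} progress is N%" DEBUG lines until completion.
        return session.wait_for_task(task_ref)
    finally:
        session.logout()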
[ 1119.312057] env[68638]: DEBUG nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1119.312861] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1274e58f-4f84-41a6-9297-551d3f7b5733 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.340478] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.340667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.340907] env[68638]: DEBUG nova.network.neutron [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1119.374945] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 9ddb29ae-9724-4712-af58-4b8d6546c6af] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1119.394581] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834395, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.608343] env[68638]: DEBUG nova.network.neutron [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.677260] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.677610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.677837] env[68638]: DEBUG nova.objects.instance [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'resources' on Instance uuid 0249ffb9-82ed-44db-bb20-e619eaa176dd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.779301] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b2d1a0-e435-adc1-284d-be21429e1509, 'name': SearchDatastore_Task, 'duration_secs': 0.039962} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.780750] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3daef033-eaf1-4ad7-9100-f647bc4d2c6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.787567] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1119.787567] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5252bb0a-ba0b-27f3-2b4a-14403a3edca7" [ 1119.787567] env[68638]: _type = "Task" [ 1119.787567] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.797669] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5252bb0a-ba0b-27f3-2b4a-14403a3edca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.830958] env[68638]: INFO nova.compute.manager [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Took 18.66 seconds to build instance. 
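The 'Acquiring lock "compute_resources" by "...ResourceTracker.update_usage"' / 'acquired ... waited 0.000s' / '"released" ... held 3.425s' records above come from the oslo.concurrency lockutils wrapper, which logs around every guarded section. A small sketch of the two usual forms, decorator and context manager, is below; the lock name mirrors the log, while the function names and bodies are illustrative placeholders rather than Nova code.

# Sketch of the locking pattern behind the 'Acquiring lock "compute_resources" ...
# acquired ... "released" ... held Ns' records above. The lock name mirrors the
# log; the function bodies are placeholders, not Nova code.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the "compute_resources" lock held; the waited/held durations in
    # the log are measured around this guarded region.
    pass

def resize_claim():
    # Equivalent context-manager form of the same lock.
    with lockutils.lock('compute_resources'):
        pass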
[ 1119.878672] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 71ec29a8-5e2f-4ccd-9c22-d9721c77622e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1119.894233] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834395, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.075632] env[68638]: DEBUG nova.network.neutron [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.099993] env[68638]: DEBUG nova.objects.instance [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'flavor' on Instance uuid ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.112732] env[68638]: INFO nova.compute.manager [-] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Took 1.26 seconds to deallocate network for instance. 
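The 'Updating instance_info_cache with network_info: [...]' record above carries the cached Neutron VIF model as a list of dicts. The standalone sketch below pulls out the fields most often needed when reading such an entry; the dict literal is a trimmed copy of the values shown in that record, with most keys omitted.

# Standalone sketch: extracting the useful fields from a network_info cache entry
# like the one logged above for instance ba07529b-... The dict is a trimmed copy
# of that record (most keys omitted).
network_info = [{
    "id": "22c8d069-e6d1-4644-89d8-516903e4ef3d",
    "address": "fa:16:3e:07:da:cc",
    "devname": "tap22c8d069-e6",
    "network": {
        "id": "e7719a30-81aa-48f1-a272-5246f78d9891",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {"segmentation_id": 441},
}]

for vif in network_info:
    # Collect the fixed IPs across all subnets of the port's network.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"],
          fixed_ips, vif["network"]["meta"]["mtu"],
          vif["details"]["segmentation_id"])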
[ 1120.215056] env[68638]: DEBUG nova.compute.manager [req-6025348d-13bd-4543-ac73-2039ecc98061 req-f89ad4ba-4c14-4d3c-a38e-f75e22ed91ad service nova] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Received event network-vif-deleted-252d498a-0dc8-42d6-9e73-c86004373452 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1120.268949] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "4a0c0188-69bb-441e-a930-ab20be5b2319" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.301120] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5252bb0a-ba0b-27f3-2b4a-14403a3edca7, 'name': SearchDatastore_Task, 'duration_secs': 0.030348} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.303661] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.303950] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4/4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.304929] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2f30148-3d4a-4eb1-9230-db1c5ac83d7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.313269] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1120.313269] env[68638]: value = "task-2834396" [ 1120.313269] env[68638]: _type = "Task" [ 1120.313269] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.324220] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834396, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.329318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.329601] env[68638]: DEBUG oslo_concurrency.lockutils [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.329822] env[68638]: DEBUG nova.compute.manager [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.330688] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ced6042-1c91-4d5b-9873-5d99fda78a42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.333510] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aa0cdd23-bfdb-4914-bd1b-c211f67f0c78 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.173s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.333803] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.065s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.334055] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.334262] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.334437] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.336616] env[68638]: INFO nova.compute.manager [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Terminating instance [ 1120.342838] env[68638]: DEBUG nova.compute.manager [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1120.344458] env[68638]: DEBUG nova.objects.instance [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'flavor' on Instance uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1120.382457] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2cdcff10-089b-47fd-ba41-2e3a75cd33b0] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1120.399379] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834395, 'name': ReconfigVM_Task, 'duration_secs': 1.230558} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.399635] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1120.439823] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192a22f2-a5c7-47ab-bdab-3ae6648463d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.450465] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159cfa4f-1d5f-42c6-b350-cb6a68fb7160 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.483383] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a66ece-c621-4545-a3aa-826bedb2c425 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.492654] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c4077f-440a-4002-8840-08402a793ebc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.508395] env[68638]: DEBUG nova.compute.provider_tree [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.579669] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.605414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-68545ff0-3063-4b2b-8d2a-e85020bd2de0 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.329s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.622123] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.713080] env[68638]: INFO nova.compute.manager [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Rescuing [ 1120.713080] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.713376] env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.713376] env[68638]: DEBUG nova.network.neutron [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1120.828076] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834396, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.846027] env[68638]: DEBUG nova.compute.manager [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1120.846447] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.849086] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75458c7b-36a3-4a8d-ba68-5d3902f25eb3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.860213] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.861038] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eab04e9b-990e-4c50-8ead-a42130634a40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.872816] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1120.872816] env[68638]: value = "task-2834397" [ 1120.872816] env[68638]: _type = "Task" [ 1120.872816] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.884966] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834397, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.889616] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4765bf70-1a72-4102-b5d3-ccedb7c383ea] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1120.907034] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1120.907241] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1120.907433] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1120.907629] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1120.907815] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1120.908056] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1120.908325] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1120.908553] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1120.908827] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1120.908930] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1120.909164] env[68638]: DEBUG nova.virt.hardware [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1120.915563] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1120.915563] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a89980a8-3f9d-47ce-9fca-799a6edb1e7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.938164] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1120.938164] env[68638]: value = "task-2834398" [ 1120.938164] env[68638]: _type = "Task" [ 1120.938164] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.953025] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834398, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.011622] env[68638]: DEBUG nova.scheduler.client.report [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.326763] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834396, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690417} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.329048] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4/4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1121.329317] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1121.329607] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0bdd4ab8-7d44-4402-b168-46ece20296ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.337484] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1121.337484] env[68638]: value = "task-2834399" [ 1121.337484] env[68638]: _type = "Task" [ 1121.337484] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.346214] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834399, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.354937] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.355231] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f246a644-7a03-41c3-bd26-d3cbcdfc26f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.366421] env[68638]: DEBUG oslo_vmware.api [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1121.366421] env[68638]: value = "task-2834400" [ 1121.366421] env[68638]: _type = "Task" [ 1121.366421] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.374634] env[68638]: DEBUG oslo_vmware.api [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834400, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.383722] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834397, 'name': PowerOffVM_Task, 'duration_secs': 0.271989} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.384025] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.384202] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.384461] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-061d8cb1-7495-46e2-89cc-7411fbec73ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.393083] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: dcaef2e3-eb23-4a0b-b617-2880084e03ab] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1121.448938] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834398, 'name': ReconfigVM_Task, 'duration_secs': 0.206464} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.449265] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1121.450080] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8680799e-91de-4ade-ae7c-c32ef0c891aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.473075] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38/volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.474331] env[68638]: DEBUG nova.network.neutron [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.475850] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35cc7eeb-9bbd-46b2-bf5d-64afabd85353 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.495842] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1121.495842] env[68638]: value = "task-2834402" [ 1121.495842] 
env[68638]: _type = "Task" [ 1121.495842] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.509399] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834402, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.516636] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.839s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.519658] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.898s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.519950] env[68638]: DEBUG nova.objects.instance [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'resources' on Instance uuid f9bd4416-b2c3-4bdd-9066-08935d304765 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.542668] env[68638]: INFO nova.scheduler.client.report [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted allocations for instance 0249ffb9-82ed-44db-bb20-e619eaa176dd [ 1121.847764] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071617} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.848141] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.848773] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca424f23-3179-4eaf-89ae-2071d66eef92 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.871047] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4/4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.871338] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d9cf68b-4bef-4d30-aae2-150e91f40768 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.893187] env[68638]: DEBUG oslo_vmware.api [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834400, 'name': PowerOffVM_Task, 'duration_secs': 0.346465} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.894025] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.894236] env[68638]: DEBUG nova.compute.manager [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.894553] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1121.894553] env[68638]: value = "task-2834403" [ 1121.894553] env[68638]: _type = "Task" [ 1121.894553] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.895238] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4c216c-0e90-4739-80ff-2c6adc80f08f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.897832] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e7559933-fecc-4eb6-ba71-a295fba684e4] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1121.910414] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834403, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.989951] env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.004894] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834402, 'name': ReconfigVM_Task, 'duration_secs': 0.293557} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.005194] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38/volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.005473] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.052569] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e2193f5a-00aa-4a14-8f9c-382961acd11b tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "0249ffb9-82ed-44db-bb20-e619eaa176dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.060s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.095188] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b6f0b6-3e0e-4d1b-9c12-420ceb642f06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.114212] env[68638]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.163075] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.163410] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.163657] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleting the datastore file [datastore1] 4a0c0188-69bb-441e-a930-ab20be5b2319 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.163966] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59eab82b-ae3e-4492-aaa7-72c2ba23a47c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.173575] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1122.173575] env[68638]: value = "task-2834404" [ 1122.173575] env[68638]: _type = "Task" [ 1122.173575] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.182658] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834404, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.276701] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a84e099-4bb6-435c-83c9-be00d4c6106a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.285055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6c0276-f55a-4384-b635-426ecb344716 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.316895] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffb18af-bdc0-4a0e-921c-977e7006f966 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.325252] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a215c13a-e58e-4566-9eea-13a2fc1d8b1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.339420] env[68638]: DEBUG nova.compute.provider_tree [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.403567] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: da886efd-bca9-45aa-abcc-13832c66a90c] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1122.409469] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834403, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.413399] env[68638]: DEBUG oslo_concurrency.lockutils [None req-362d3035-a573-4d24-b875-738b0bab02d5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.084s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.512491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f744a41d-9f12-4f79-aee6-33f1d0bb0fa5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.538539] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5345a019-c62a-4364-83a9-c7bd8607df4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.557108] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1122.622954] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.623328] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6800077e-3307-4c8c-adee-7dc5fb3b2aa8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.631687] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1122.631687] env[68638]: value = "task-2834405" [ 1122.631687] env[68638]: _type = "Task" [ 1122.631687] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.641541] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834405, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.683924] env[68638]: DEBUG oslo_vmware.api [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328494} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.684215] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.684403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.684682] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.684877] env[68638]: INFO nova.compute.manager [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Took 1.84 seconds to destroy the instance on the hypervisor. [ 1122.685142] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1122.685373] env[68638]: DEBUG nova.compute.manager [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1122.685476] env[68638]: DEBUG nova.network.neutron [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.763137] env[68638]: DEBUG nova.objects.instance [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'flavor' on Instance uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.843235] env[68638]: DEBUG nova.scheduler.client.report [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1122.911852] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: fd329f9d-daf3-47ff-9c48-e1355fc012f4] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1122.915192] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834403, 'name': ReconfigVM_Task, 'duration_secs': 0.526389} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.916413] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4/4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.917624] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4dda3ba-4a3c-4020-bc88-1ccdf46dfa5a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.927708] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1122.927708] env[68638]: value = "task-2834406" [ 1122.927708] env[68638]: _type = "Task" [ 1122.927708] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.941099] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834406, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.017733] env[68638]: DEBUG nova.compute.manager [req-92824d3c-d557-4411-bbc5-e6752c750f91 req-0fdd37fe-0d24-4b9f-8d82-5e5ed4470dba service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Received event network-vif-deleted-f587042f-8bad-458a-9cd7-16e741d597ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1123.018243] env[68638]: INFO nova.compute.manager [req-92824d3c-d557-4411-bbc5-e6752c750f91 req-0fdd37fe-0d24-4b9f-8d82-5e5ed4470dba service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Neutron deleted interface f587042f-8bad-458a-9cd7-16e741d597ca; detaching it from the instance and deleting it from the info cache [ 1123.019662] env[68638]: DEBUG nova.network.neutron [req-92824d3c-d557-4411-bbc5-e6752c750f91 req-0fdd37fe-0d24-4b9f-8d82-5e5ed4470dba service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.141849] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834405, 'name': PowerOffVM_Task, 'duration_secs': 0.399168} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.141955] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1123.142120] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1123.268639] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.268843] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.269013] env[68638]: DEBUG nova.network.neutron [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.269201] env[68638]: DEBUG nova.objects.instance [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'info_cache' on Instance uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.348832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.372124] env[68638]: INFO nova.scheduler.client.report [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance f9bd4416-b2c3-4bdd-9066-08935d304765 [ 1123.416788] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: f0598d8d-09a9-44ce-b4d7-cb8830a84b94] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1123.440720] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': 
task-2834406, 'name': Rename_Task, 'duration_secs': 0.169066} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.440926] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.441197] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44daf171-27da-4ebe-967c-7fcc8f74daba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.457287] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1123.457287] env[68638]: value = "task-2834407" [ 1123.457287] env[68638]: _type = "Task" [ 1123.457287] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.467854] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.496188] env[68638]: DEBUG nova.network.neutron [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.524524] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cffa2832-6ac8-45fc-a889-989a44db89e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.537120] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ab4e01-930f-44a8-9e88-d179b6aeda65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.549196] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1123.549577] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cfd137c-e0a1-4031-bf4a-19bfe7bbc4f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.558031] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1123.558031] env[68638]: value = "task-2834408" [ 1123.558031] env[68638]: _type = "Task" [ 1123.558031] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.567639] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.583088] env[68638]: DEBUG nova.compute.manager [req-92824d3c-d557-4411-bbc5-e6752c750f91 req-0fdd37fe-0d24-4b9f-8d82-5e5ed4470dba service nova] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Detach interface failed, port_id=f587042f-8bad-458a-9cd7-16e741d597ca, reason: Instance 4a0c0188-69bb-441e-a930-ab20be5b2319 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1123.652357] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1123.652607] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.652759] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1123.653053] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.653128] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 
tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1123.654133] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1123.662020] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fc6202f-a63b-4500-b9ef-93ef58d10efc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.678045] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1123.678045] env[68638]: value = "task-2834409" [ 1123.678045] env[68638]: _type = "Task" [ 1123.678045] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.687478] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834409, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.773281] env[68638]: DEBUG nova.objects.base [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Object Instance<58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1123.882029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5d4666c3-9fd4-42fd-a758-2ed72afbb1b4 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "f9bd4416-b2c3-4bdd-9066-08935d304765" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.168s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.920808] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 43e0eed3-bc25-476d-a9ef-6b132514cf90] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1123.968310] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834407, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.999705] env[68638]: INFO nova.compute.manager [-] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Took 1.31 seconds to deallocate network for instance. [ 1124.068378] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834408, 'name': PowerOffVM_Task, 'duration_secs': 0.269478} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.068657] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.069499] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18ec53e-0d43-4f9f-b15c-957510ef6b45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.091127] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b48d73c-dd52-430b-9aae-389e5a5e9d54 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.120965] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.120965] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcf7f035-35d1-4c86-adae-6a64ee3389e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.129144] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1124.129144] env[68638]: value = "task-2834410" [ 1124.129144] env[68638]: _type = "Task" [ 1124.129144] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.136808] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834410, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.188733] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834409, 'name': ReconfigVM_Task, 'duration_secs': 0.22199} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.189159] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.228357] env[68638]: DEBUG nova.network.neutron [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Port ef048785-d375-47e3-9f3c-2f26fd1bb175 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1124.424739] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e9b8e5ad-4d47-48ad-995f-b28d0230df0f] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1124.468235] env[68638]: DEBUG oslo_vmware.api [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834407, 'name': PowerOnVM_Task, 'duration_secs': 0.811502} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.468591] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.468709] env[68638]: INFO nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Took 10.84 seconds to spawn the instance on the hypervisor. 
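The entries above repeat the same asynchronous pattern against vCenter: the driver invokes a vSphere task method (CopyVirtualDisk_Task, PowerOffVM_Task, ReconfigVM_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task), gets back a task reference such as task-2834407, and oslo.vmware's wait_for_task then polls it until it reports success. Below is a minimal sketch of that invoke-then-poll pattern using the public oslo.vmware API; it is not Nova's code, and the vCenter host, credentials, retry/poll values and the VM moref are placeholders.

```python
# Minimal sketch of the invoke-then-poll pattern seen in the log
# (PowerOffVM_Task, ReconfigVM_Task, PowerOnVM_Task, ...). Not Nova's
# code: host, credentials and the VM moref below are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Create an authenticated session with the vCenter SDK endpoint.
session = vmware_api.VMwareAPISession(
    'vc.example.test',      # placeholder vCenter host
    'user', 'secret',       # placeholder credentials
    10,                     # API retry count
    0.5)                    # task poll interval used by wait_for_task

# Build a managed object reference for some VM (placeholder moref value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off an asynchronous vSphere task, then block while oslo.vmware
# polls its progress; wait_for_task raises if the task ends in error.
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)      # 'success' once the task has completed
```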
[ 1124.468885] env[68638]: DEBUG nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.469705] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ef012c-598c-484a-bdad-551a8cf23c0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.480517] env[68638]: DEBUG nova.network.neutron [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.491361] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.491576] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.505867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.506167] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.506387] env[68638]: DEBUG nova.objects.instance [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'resources' on Instance uuid 4a0c0188-69bb-441e-a930-ab20be5b2319 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.640927] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1124.641334] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1124.641610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.641768] env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.641949] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1124.642228] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e716bf5f-fcee-4ebb-b817-8b3276600ff7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.658273] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} 
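Note on the lock traces above: two oslo.concurrency idioms produce them. The 'Acquiring lock "..."' / 'Acquired lock "..."' / 'Releasing lock "..."' lines (logged from lockutils.py:313/316/334) come from the lockutils.lock() context manager, e.g. the per-image "[datastore2] devstack-image-cache_base/..." lock, while the 'acquired by "..." :: waited N.NNNs' / '"released" by "..." :: held N.NNNs' lines come from the lockutils.synchronized decorator's inner wrapper, e.g. the "compute_resources" lock held by the resource tracker. A minimal sketch of both forms; the lock names mirror the log and the function bodies are placeholders.

from oslo_concurrency import lockutils

# Lock name mirroring the per-image cache path seen in the log.
CACHE_VMDK = ('[datastore2] devstack-image-cache_base/'
              'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/'
              'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk')

def fetch_image_if_missing():
    # Context-manager form: logs 'Acquiring lock "..."' / 'Acquired lock "..."'
    # on entry and 'Releasing lock "..."' on exit.
    with lockutils.lock(CACHE_VMDK):
        pass  # placeholder: create the cache directory and copy the disk here

@lockutils.synchronized('compute_resources')
def update_usage():
    # Decorator form: the wrapper logs how long the caller waited for the lock
    # and how long it was held once the call returns.
    pass  # placeholder: resource-tracker bookkeeping would run here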
[ 1124.658460] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1124.659199] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1dd5fa5-2fc0-407f-ad9f-7ddd6a9d3b23 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.664914] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1124.664914] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b0f8a5-a7d0-61ae-2848-8349ac735d0f" [ 1124.664914] env[68638]: _type = "Task" [ 1124.664914] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.672600] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b0f8a5-a7d0-61ae-2848-8349ac735d0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.696807] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1124.697055] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1124.697219] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1124.697401] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1124.697548] env[68638]: DEBUG nova.virt.hardware [None 
req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1124.697736] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1124.697961] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1124.698171] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1124.698351] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1124.698517] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1124.698689] env[68638]: DEBUG nova.virt.hardware [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1124.704042] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1124.704314] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b37c91f1-6c17-4ecf-bd9c-748a99913e87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.723825] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1124.723825] env[68638]: value = "task-2834411" [ 1124.723825] env[68638]: _type = "Task" [ 1124.723825] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.731840] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.835149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.835394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.928077] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 32d43fce-837d-41d9-be11-a0c3cdb1694b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1124.983572] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.988067] env[68638]: INFO nova.compute.manager [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Took 15.80 seconds to build instance. [ 1124.993445] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1125.175821] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b0f8a5-a7d0-61ae-2848-8349ac735d0f, 'name': SearchDatastore_Task, 'duration_secs': 0.038711} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.179046] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b639cf5b-cc03-4115-87d4-15c6441b7adc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.184067] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1125.184067] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d18c0c-28f4-6f39-4dd4-d3414bba4a0d" [ 1125.184067] env[68638]: _type = "Task" [ 1125.184067] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.192933] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d18c0c-28f4-6f39-4dd4-d3414bba4a0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.235027] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834411, 'name': ReconfigVM_Task, 'duration_secs': 0.176113} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.235240] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1125.236030] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d63739-2dc2-4722-b8d4-e71e996e0e37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.255081] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.255349] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.255525] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 
tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.274904] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.278975] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0232bd75-3e0d-4c5b-bb6e-617f7a6d04b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.299502] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1125.299502] env[68638]: value = "task-2834412" [ 1125.299502] env[68638]: _type = "Task" [ 1125.299502] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.311988] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834412, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.337541] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1125.395246] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d5f22e-824a-4103-9457-e926e3ddc5cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.403465] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001bd140-0bd6-4a9f-b13d-298ae50149d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.434456] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ea8f58dc-1542-4723-bf86-369d4dff5f25] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1125.437185] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7187891b-cd36-421a-a450-b566255cefb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.446389] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b23753e-00a3-4c41-8d1e-fb6168823c12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.463332] env[68638]: DEBUG nova.compute.provider_tree [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.490047] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe2ced42-f421-4992-9e81-2bf712c3a4f8 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.312s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.513294] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.695335] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d18c0c-28f4-6f39-4dd4-d3414bba4a0d, 'name': SearchDatastore_Task, 'duration_secs': 0.03872} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.695673] env[68638]: DEBUG oslo_concurrency.lockutils [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.695989] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. {{(pid=68638) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1125.696337] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a72dd47a-f2e8-4e4d-80d4-49b6b356db2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.704343] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1125.704343] env[68638]: value = "task-2834413" [ 1125.704343] env[68638]: _type = "Task" [ 1125.704343] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.712808] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834413, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.809471] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834412, 'name': ReconfigVM_Task, 'duration_secs': 0.290682} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.809762] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Reconfigured VM instance instance-0000006d to attach disk [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7/ba07529b-e6d0-4c22-b938-c4908a7eafd7.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1125.810066] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1125.861547] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.941638] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 94af9123-435f-4ae4-8b6d-82838df61d4e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1125.967725] env[68638]: DEBUG nova.scheduler.client.report [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.993029] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1125.993029] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efa13799-d6f6-41ce-a785-612687dd3a59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.003204] env[68638]: DEBUG oslo_vmware.api [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1126.003204] env[68638]: value = "task-2834414" [ 1126.003204] env[68638]: _type = "Task" [ 1126.003204] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.015408] env[68638]: DEBUG oslo_vmware.api [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834414, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.215366] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834413, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.289191] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.289477] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.289597] env[68638]: DEBUG nova.network.neutron [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.316950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb8a703-8010-4c75-9a83-0b7998d75063 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.338644] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf0578b-45ca-4674-b950-01136527f638 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.356793] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1126.369058] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.369323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.369501] env[68638]: DEBUG nova.compute.manager [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1126.370383] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864c4b8e-d990-419c-abc4-c851110c1236 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.377742] env[68638]: DEBUG nova.compute.manager [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1126.379028] env[68638]: DEBUG nova.objects.instance [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'flavor' on Instance uuid 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.445877] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 61b9bce5-6a3e-4149-a759-d08e2e2301ee] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1126.473826] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.967s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.476858] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.964s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.478928] env[68638]: INFO nova.compute.claims [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.492763] env[68638]: INFO nova.scheduler.client.report [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted allocations for instance 4a0c0188-69bb-441e-a930-ab20be5b2319 [ 1126.514742] env[68638]: DEBUG oslo_vmware.api [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 
tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834414, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.717064] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834413, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536828} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.717064] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk. [ 1126.717277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d4aad-eaf2-41e9-9d8d-803615770fac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.746709] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1126.747092] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb2f4d2a-0e77-4c0c-83da-3d6a2be18c2f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.766357] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1126.766357] env[68638]: value = "task-2834415" [ 1126.766357] env[68638]: _type = "Task" [ 1126.766357] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.776054] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834415, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.949305] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: cd27220d-c706-4450-a01b-c871c608056f] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1126.951853] env[68638]: DEBUG nova.network.neutron [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Port 22c8d069-e6d1-4644-89d8-516903e4ef3d binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1127.003346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3550c342-3632-4a88-9e76-82e9569fd625 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "4a0c0188-69bb-441e-a930-ab20be5b2319" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.668s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.019542] env[68638]: DEBUG oslo_vmware.api [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834414, 'name': PowerOnVM_Task, 'duration_secs': 0.76351} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.024042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1127.024375] env[68638]: DEBUG nova.compute.manager [None req-1b79ef3b-f260-4ef6-a9ca-598579cf5df0 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.026420] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca71e93-ca22-4875-96a1-7ccffbcd981e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.133159] env[68638]: DEBUG nova.network.neutron [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.276893] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834415, 'name': ReconfigVM_Task, 'duration_secs': 0.4173} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.277208] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfigured VM instance instance-00000065 to attach disk [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9-rescue.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1127.278067] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484352c4-3f0e-417d-8774-2ea8af2f855e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.306740] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bb73a41-c805-449d-a2cb-2ed516ba45eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.321898] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1127.321898] env[68638]: value = "task-2834416" [ 1127.321898] env[68638]: _type = "Task" [ 1127.321898] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.329810] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834416, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.385701] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.386037] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5da3332-1da7-4e4a-8b37-5779f27f12b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.394808] env[68638]: DEBUG oslo_vmware.api [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1127.394808] env[68638]: value = "task-2834417" [ 1127.394808] env[68638]: _type = "Task" [ 1127.394808] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.403685] env[68638]: DEBUG oslo_vmware.api [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.453871] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 30193a76-a391-4a64-98cc-7e22dcf7218c] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1127.636255] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.725424] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980bb331-6b11-4dd8-a381-9b0892dec226 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.733954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ba83fe-46ef-46ed-ac97-7638d1695a02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.768025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d1419b-7b75-4e5a-8951-216e937cd528 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.776170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a0fc31-d7e0-41a9-b15d-b5e6433b5327 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.790362] env[68638]: DEBUG nova.compute.provider_tree [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: 
a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.832512] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834416, 'name': ReconfigVM_Task, 'duration_secs': 0.195095} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.832803] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1127.833062] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f5240a3-31dd-445e-a062-16e656755b6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.840127] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1127.840127] env[68638]: value = "task-2834418" [ 1127.840127] env[68638]: _type = "Task" [ 1127.840127] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.848217] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.905172] env[68638]: DEBUG oslo_vmware.api [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834417, 'name': PowerOffVM_Task, 'duration_secs': 0.207193} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.905473] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.905762] env[68638]: DEBUG nova.compute.manager [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.906620] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51235900-c180-4155-b138-d67b817d06ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.957868] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 9bfcfa7d-2754-421c-ac2e-cb73a69cf1d9] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1127.977690] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.977981] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.979233] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.147610] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40021ea-65af-4c87-a1fd-d4d491d092a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.157486] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd7d405-1e3b-454c-bfc2-278cbe4f458d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.294629] env[68638]: DEBUG nova.scheduler.client.report [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider 
a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.352389] env[68638]: DEBUG oslo_vmware.api [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834418, 'name': PowerOnVM_Task, 'duration_secs': 0.419965} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.353028] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1128.355807] env[68638]: DEBUG nova.compute.manager [None req-62b3953d-e161-4a11-a6f4-c256c4ba2768 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1128.356741] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35195ae9-cd3e-4624-8f3b-8bccf53ae9bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.420393] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c72f729a-02a7-425a-922d-62def7b155b3 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.461172] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: d2d30008-5058-4be3-b803-00d8ca4450d5] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1128.799734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.800311] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1128.803012] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.942s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.804504] env[68638]: INFO nova.compute.claims [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.965192] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 1bc685aa-4e88-402f-b581-d179706b12a5] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1129.013065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.013354] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.013545] env[68638]: DEBUG nova.network.neutron [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.189983] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.190365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.190682] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.190962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.191231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.193744] env[68638]: INFO nova.compute.manager [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Terminating instance [ 1129.268178] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4777d0b4-acdd-424b-95ad-3ee119b9068f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.290960] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d9ade4-3e95-42ff-8bcb-8d90a11f6c7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.299350] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.310020] env[68638]: DEBUG nova.compute.utils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1129.312675] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1129.312849] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1129.388717] env[68638]: DEBUG nova.policy [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5dc492be0cd4ce999d61eb28ac3b2e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee5d59c43e974d04ba56981f2716ff60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1129.468660] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 9975e756-b571-4e70-ba50-a6001d0b064c] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1129.698035] env[68638]: DEBUG nova.compute.manager [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1129.698465] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.699378] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806ca25e-f24a-4739-8760-648cc6cb3476 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.708126] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.708383] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b1fb97b-9309-4843-b5d6-31747b0e473f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.779120] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.779120] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 
tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.779120] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore1] 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.779120] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebe2a75e-04ee-488d-bed1-dc52e2cbb484 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.787347] env[68638]: DEBUG oslo_vmware.api [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1129.787347] env[68638]: value = "task-2834420" [ 1129.787347] env[68638]: _type = "Task" [ 1129.787347] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.795838] env[68638]: DEBUG oslo_vmware.api [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.805844] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.806142] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cd4ae70-eb2a-43b8-8839-902f1b316f98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.813647] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1129.820027] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1129.820027] env[68638]: value = "task-2834421" [ 1129.820027] env[68638]: _type = "Task" [ 1129.820027] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.830243] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834421, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.861561] env[68638]: DEBUG nova.network.neutron [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.881359] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Successfully created port: 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.975706] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: fd6d5951-f2a1-422d-b137-4d19759f9060] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1130.068864] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aeaf6e5-227e-4bc1-9fec-68e0974a174d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.077642] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8d1af0-160c-4587-81b4-3ef2fae4d108 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.111105] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78609b5c-250e-4407-80c9-7949b7980707 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.123400] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a0b485-5bcc-4135-944c-32999080d099 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.137795] env[68638]: INFO nova.compute.manager [None 
req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Unrescuing [ 1130.138185] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.138405] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.138614] env[68638]: DEBUG nova.network.neutron [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1130.140301] env[68638]: DEBUG nova.compute.provider_tree [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.298578] env[68638]: DEBUG oslo_vmware.api [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160198} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.298849] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.299074] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.299268] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.299441] env[68638]: INFO nova.compute.manager [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Took 0.60 seconds to destroy the instance on the hypervisor. 
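[editorial sketch] The entries above trace a full teardown of instance 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4: the VM is unregistered, its datastore contents are removed via a DeleteDatastoreFile_Task, and the wait_for_task/_poll_task helpers poll that task until it reports success before Nova moves on to deallocating the network. Below is a minimal, self-contained sketch of that poll-until-done pattern only; TaskInfo, TaskFailed and fetch_task_info are hypothetical stand-ins invented for illustration and are not the oslo.vmware API.

    # Hypothetical illustration of the poll-until-done loop reflected in the
    # wait_for_task/_poll_task entries above. TaskInfo, TaskFailed and
    # fetch_task_info are stand-ins for the sketch, not oslo.vmware classes.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional


    @dataclass
    class TaskInfo:
        task_id: str
        state: str                  # e.g. "running", "success", "error"
        progress: int = 0           # percent complete
        error: Optional[str] = None


    class TaskFailed(Exception):
        pass


    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info()
            if info.state == "success":
                return info          # the "completed successfully" case
            if info.state == "error":
                raise TaskFailed(info.error or "task %s failed" % info.task_id)
            if time.monotonic() >= deadline:
                raise TimeoutError("task %s did not finish in time" % info.task_id)
            # Mirrors the repeated "progress is N%" entries seen in the log.
            print("Task %s progress is %d%%" % (info.task_id, info.progress))
            time.sleep(poll_interval)


    if __name__ == "__main__":
        # Fake task that reaches "success" after three polls.
        states = iter([("running", 0), ("running", 66), ("success", 100)])

        def fake_fetch() -> TaskInfo:
            state, progress = next(states)
            return TaskInfo(task_id="task-0000001", state=state, progress=progress)

        print(wait_for_task(fake_fetch, poll_interval=0.01))

In the real driver the poll interval and retry behaviour come from service configuration rather than hard-coded defaults; the fixed values here are illustrative only.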
[ 1130.299677] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.299872] env[68638]: DEBUG nova.compute.manager [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1130.299965] env[68638]: DEBUG nova.network.neutron [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1130.336112] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834421, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.369033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.481135] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 9ba0f737-7947-409c-9163-79d621a29285] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1130.604831] env[68638]: DEBUG nova.compute.manager [req-82041484-9db1-4937-8c75-b3df40e15a68 req-21b7a87f-bcb6-49f5-b149-7fcfec3dc936 service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Received event network-vif-deleted-efe8e56e-2095-427b-ab7a-31cbdb0521ca {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1130.604963] env[68638]: INFO nova.compute.manager [req-82041484-9db1-4937-8c75-b3df40e15a68 req-21b7a87f-bcb6-49f5-b149-7fcfec3dc936 service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Neutron deleted interface efe8e56e-2095-427b-ab7a-31cbdb0521ca; detaching it from the instance and deleting it from the info cache [ 1130.605154] env[68638]: DEBUG nova.network.neutron [req-82041484-9db1-4937-8c75-b3df40e15a68 req-21b7a87f-bcb6-49f5-b149-7fcfec3dc936 service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.648291] env[68638]: DEBUG nova.scheduler.client.report [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.830894] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1130.839472] env[68638]: DEBUG oslo_vmware.api [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834421, 'name': PowerOnVM_Task, 'duration_secs': 0.786619} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.839747] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.839930] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1f61a271-db09-4d34-98c4-cd798e608d64 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance 'e0903192-4fa7-437a-9023-33e8e65124e3' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1130.862033] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.862296] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.862452] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.862629] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 
tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.862834] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.863056] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.863279] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.863442] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.863673] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.863851] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.864234] env[68638]: DEBUG nova.virt.hardware [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.864907] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acac24eb-efdb-4bec-b2ca-776974b85690 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.876774] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a4c8ac-6c40-43b5-a1d2-5d2c06fc2389 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.897762] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9933ba54-3619-4348-9f5c-ee536f172c30 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.918920] 
env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06814a94-8f4f-456c-b3ec-81db50f40797 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.927678] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1130.984613] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 3c3fcbca-2477-4037-a978-4b8e9ed0a690] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1131.080290] env[68638]: DEBUG nova.network.neutron [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.107975] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b94b3067-534a-4a0a-8e60-00174619c268 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.119409] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4711263-0c0c-4dbc-a96f-8e2d9c717faa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.158385] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.158930] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1131.161679] env[68638]: DEBUG nova.compute.manager [req-82041484-9db1-4937-8c75-b3df40e15a68 req-21b7a87f-bcb6-49f5-b149-7fcfec3dc936 service nova] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Detach interface failed, port_id=efe8e56e-2095-427b-ab7a-31cbdb0521ca, reason: Instance 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1131.244515] env[68638]: DEBUG nova.network.neutron [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.434102] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.434439] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-651a2401-3e5c-4d05-b758-065109cf5953 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.457018] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1131.457018] env[68638]: value = "task-2834422" [ 1131.457018] env[68638]: _type = "Task" [ 1131.457018] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.466512] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834422, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.480328] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Successfully updated port: 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.487931] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 92c90438-f7cc-4a48-bfac-f7912709cf88] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1131.582869] env[68638]: INFO nova.compute.manager [-] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Took 1.28 seconds to deallocate network for instance. [ 1131.664011] env[68638]: DEBUG nova.compute.utils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1131.665401] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1131.665575] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1131.708549] env[68638]: DEBUG nova.policy [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1131.751878] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.752557] env[68638]: DEBUG nova.objects.instance [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'flavor' on Instance uuid ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.968271] env[68638]: DEBUG oslo_vmware.api [None 
req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834422, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.983273] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.983642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.983954] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1131.996113] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: d49fdd3f-3ad6-4396-811f-67f1ef1f2940] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1132.090339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.090637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.090863] env[68638]: DEBUG nova.objects.instance [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.122604] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Successfully created port: 95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.169324] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Start 
building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1132.258439] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663cd7b2-308b-4915-89f4-f16edf4b52af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.285304] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.285703] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81dd78ae-7c1e-40a4-9749-28561dc7ad4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.295501] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1132.295501] env[68638]: value = "task-2834423" [ 1132.295501] env[68638]: _type = "Task" [ 1132.295501] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.308439] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.468453] env[68638]: DEBUG oslo_vmware.api [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834422, 'name': PowerOnVM_Task, 'duration_secs': 0.910059} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.468799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.469011] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3d25b526-64fb-4b0c-894d-d87c0ef91417 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance 'ba07529b-e6d0-4c22-b938-c4908a7eafd7' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.500539] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: c07f6e3a-86cf-4584-aa5e-5adc4bf086e3] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1132.523950] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.730201] env[68638]: DEBUG nova.compute.manager [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1132.730201] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.730201] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.730201] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.730201] env[68638]: DEBUG nova.compute.manager [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] No waiting events found dispatching network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.730201] env[68638]: WARNING 
nova.compute.manager [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received unexpected event network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 for instance with vm_state building and task_state spawning. [ 1132.731186] env[68638]: DEBUG nova.compute.manager [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1132.731504] env[68638]: DEBUG nova.compute.manager [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing instance network info cache due to event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1132.731823] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.757280] env[68638]: DEBUG nova.network.neutron [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.805778] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834423, 'name': PowerOffVM_Task, 'duration_secs': 0.344053} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.808332] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.813707] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfiguring VM instance instance-00000065 to detach disk 2002 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1132.814737] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e9c68d5-5b66-4e4e-8ed1-8d573c0ad4dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.836862] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1132.836862] env[68638]: value = "task-2834424" [ 1132.836862] env[68638]: _type = "Task" [ 1132.836862] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.848195] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834424, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.902228] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57e72d4-8b43-423b-b788-c4dc2a3ed5ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.911309] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da757793-36df-4560-93b8-38304ee94ee6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.948349] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a875ca1f-e5c2-4875-ab09-23245ac5b905 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.956954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a701d21-f471-4945-ba83-67e9aeadbf7c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.972527] env[68638]: DEBUG nova.compute.provider_tree [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1132.995431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.995577] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.995787] env[68638]: DEBUG nova.compute.manager [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Going to confirm migration 3 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1133.003444] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 6200613c-b5de-4774-b0c6-fdb78b4c7267] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1133.183273] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1133.204671] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.205010] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.205179] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.205359] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.205506] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.205654] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.205862] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.206116] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.206204] env[68638]: DEBUG nova.virt.hardware [None 
req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.206363] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.206564] env[68638]: DEBUG nova.virt.hardware [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.207477] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32a72e3-208a-4bd8-a51e-de2e29ef8752 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.216144] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cd003a-6a37-4ab8-9e70-97923341c299 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.259832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.260066] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance network_info: |[{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1133.260451] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 
req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.260621] env[68638]: DEBUG nova.network.neutron [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1133.261877] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:0e:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3199e8a3-335c-43ff-be19-3881b85a0203', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.269404] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1133.269836] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1133.270079] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9eeb9834-66ef-4d04-b000-dc0dda654071 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.292123] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.292123] env[68638]: value = "task-2834425" [ 1133.292123] env[68638]: _type = "Task" [ 1133.292123] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.302756] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834425, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.346751] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834424, 'name': ReconfigVM_Task, 'duration_secs': 0.301013} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.347091] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfigured VM instance instance-00000065 to detach disk 2002 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1133.347316] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.347583] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7844280-7422-4939-a4a6-ff73f0d713c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.357456] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1133.357456] env[68638]: value = "task-2834426" [ 1133.357456] env[68638]: _type = "Task" [ 1133.357456] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.368393] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834426, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.480217] env[68638]: DEBUG nova.scheduler.client.report [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.490975] env[68638]: DEBUG nova.compute.manager [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Received event network-vif-plugged-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1133.491205] env[68638]: DEBUG oslo_concurrency.lockutils [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] Acquiring lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.491408] env[68638]: DEBUG oslo_concurrency.lockutils [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.491566] env[68638]: DEBUG oslo_concurrency.lockutils [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.491730] env[68638]: DEBUG nova.compute.manager [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] No waiting events found dispatching network-vif-plugged-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1133.491886] env[68638]: WARNING nova.compute.manager [req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Received unexpected event network-vif-plugged-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 for instance with vm_state building and task_state spawning. 
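The entries above all follow one fixed layout: a kernel-style uptime timestamp, an env[<pid>] tag, the log level, the emitting module, the request/context IDs in square brackets, an optional "[instance: <uuid>]" tag, the message itself, and a trailing "{{(pid=...) function file:line}}" source annotation. The following is a minimal illustrative Python sketch, not part of the captured log, for pulling those fields apart; it assumes exactly the single-line format visible in the surrounding entries, the names ENTRY_RE and parse_entry are introduced here purely for illustration, and multi-line continuations (such as the "(returnval){ ... }" task blocks) are deliberately out of scope and would need to be stitched back together first.

import re

# Assumed entry layout (from the samples in this log):
#   [ <uptime>] env[<pid>]: LEVEL module [<request/context ids>] [instance: <uuid>] message {{(pid=<pid>) func file:line}}
# The "[instance: ...]" tag and the trailing "{{...}}" source annotation are optional.
ENTRY_RE = re.compile(
    r"\[\s*(?P<uptime>\d+\.\d+)\]\s+"
    r"env\[(?P<env_pid>\d+)\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"
    r"(?P<module>\S+)\s+"
    r"\[(?P<context>[^\]]*)\]\s*"
    r"(?:\[instance:\s*(?P<instance>[0-9a-f-]{36})\]\s*)?"
    r"(?P<message>.*?)"
    r"(?:\s*\{\{\(pid=\d+\)\s*(?P<source>[^}]*)\}\})?\s*$"
)

def parse_entry(line):
    """Return a dict of named fields for one single-line entry, or None if it does not match."""
    match = ENTRY_RE.match(line.strip())
    return match.groupdict() if match else None

# Example, taken verbatim from an entry above:
sample = (
    "[ 1133.490975] env[68638]: DEBUG nova.compute.manager "
    "[req-92778d5e-a778-4f10-b24b-6f9bca6240e2 req-b0616524-39e8-4b94-a17a-6dc6908d3b1d service nova] "
    "[instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] "
    "Received event network-vif-plugged-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 "
    "{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}}"
)
fields = parse_entry(sample)
# fields["module"] == "nova.compute.manager"
# fields["instance"] == "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb"
# fields["source"] == "external_instance_event /opt/stack/nova/nova/compute/manager.py:11656"

Grouping parsed entries by the context field (the req-... pair) or by the instance UUID is usually enough to follow a single tempest test's spawn, resize, or detach sequence through the interleaved output below.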
[ 1133.509879] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 17f6cd0a-bbc1-47c3-9c36-2166ba448de2] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1133.610151] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.610355] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquired lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.610532] env[68638]: DEBUG nova.network.neutron [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1133.611242] env[68638]: DEBUG nova.objects.instance [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'info_cache' on Instance uuid e0903192-4fa7-437a-9023-33e8e65124e3 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.804624] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834425, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.869072] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834426, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.987812] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.997473] env[68638]: DEBUG nova.network.neutron [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updated VIF entry in instance network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1133.997931] env[68638]: DEBUG nova.network.neutron [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.012048] env[68638]: INFO nova.scheduler.client.report [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4 [ 1134.017515] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 14c1dba5-98cb-4ebd-8e76-60b3f74cca4b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1134.103964] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Successfully updated port: 95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.303164] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834425, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.368732] env[68638]: DEBUG oslo_vmware.api [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834426, 'name': PowerOnVM_Task, 'duration_secs': 0.531767} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.369030] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.369313] env[68638]: DEBUG nova.compute.manager [None req-f958fbc1-ff6a-4537-b81b-e62cd7f2c532 tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.370139] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb01c19e-e3ce-4f55-8ec1-cd6058120603 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.502813] env[68638]: DEBUG oslo_concurrency.lockutils [req-65418082-1cc5-4813-a2d4-ce0e70573971 req-40581e67-e18c-4141-8e1a-0821c0764672 service nova] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.519901] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2d472c5c-3f64-4ad4-a71c-1b9ba593b293 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.329s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.522578] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4c954bb4-6291-47d5-a65c-0ad92a0fd193] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1134.606563] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.606780] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.606969] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.753845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock 
"ba07529b-e6d0-4c22-b938-c4908a7eafd7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.753845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.754031] env[68638]: DEBUG nova.compute.manager [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Going to confirm migration 4 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1134.805509] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834425, 'name': CreateVM_Task, 'duration_secs': 1.378435} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.805681] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1134.806390] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.806560] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.806912] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1134.807190] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-497bcbd9-925d-4ba5-b99c-9434c22e1b7a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.816028] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1134.816028] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52fd65fc-d505-4e41-9b85-28597bedb0d6" [ 1134.816028] env[68638]: _type = "Task" [ 1134.816028] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.827434] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fd65fc-d505-4e41-9b85-28597bedb0d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.861920] env[68638]: DEBUG nova.compute.manager [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Received event network-changed-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1134.862144] env[68638]: DEBUG nova.compute.manager [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Refreshing instance network info cache due to event network-changed-95615d9a-8948-4dd3-bc9d-c65df08cd7a8. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1134.862337] env[68638]: DEBUG oslo_concurrency.lockutils [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] Acquiring lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.925798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.926774] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.935370] env[68638]: DEBUG nova.network.neutron [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [{"id": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "address": "fa:16:3e:65:11:44", "network": {"id": "2181efd7-a094-4c4b-8754-da82e89be85a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1274773453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "efa342b9d9a34e9e8e708c8f356f905e", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69cfa7ba-6989-4d75-9495-97b5fea00c3c", "external-id": "nsx-vlan-transportzone-225", "segmentation_id": 225, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef048785-d3", "ovs_interfaceid": "ef048785-d375-47e3-9f3c-2f26fd1bb175", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.026015] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 02894a47-59b1-475b-b934-c8d0b6dabc5b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1135.141736] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1135.208938] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.210173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.210173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.210173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.210173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.211787] 
env[68638]: INFO nova.compute.manager [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Terminating instance [ 1135.288366] env[68638]: DEBUG nova.network.neutron [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Updating instance_info_cache with network_info: [{"id": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "address": "fa:16:3e:71:ee:44", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95615d9a-89", "ovs_interfaceid": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.300026] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.300217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.300397] env[68638]: DEBUG nova.network.neutron [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.300582] env[68638]: DEBUG nova.objects.instance [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'info_cache' on Instance uuid ba07529b-e6d0-4c22-b938-c4908a7eafd7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.326925] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': 
session[5267461d-1849-2a3b-78fe-5543790e1404]52fd65fc-d505-4e41-9b85-28597bedb0d6, 'name': SearchDatastore_Task, 'duration_secs': 0.011039} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.327251] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.327485] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1135.327713] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.327859] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.328107] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1135.328598] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bfd7e59-37eb-4445-8164-7f04ef21a5d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.337908] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1135.338115] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1135.338821] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-185ad8b4-7188-46ca-bfb1-a1cb00ed6a0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.346087] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1135.346087] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52afd387-cc76-8c6b-2016-932a1efbffd9" [ 1135.346087] env[68638]: _type = "Task" [ 1135.346087] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.353986] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52afd387-cc76-8c6b-2016-932a1efbffd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.430490] env[68638]: DEBUG nova.compute.utils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.437747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Releasing lock "refresh_cache-e0903192-4fa7-437a-9023-33e8e65124e3" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.438049] env[68638]: DEBUG nova.objects.instance [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'migration_context' on Instance uuid e0903192-4fa7-437a-9023-33e8e65124e3 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.529476] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ee752ace-fa19-4fd7-af89-f6628ce3d087] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1135.715356] env[68638]: DEBUG nova.compute.manager [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1135.715589] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1135.716597] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cd7769-3d5b-4af5-8f0d-0fa4bc9a4cc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.725162] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1135.725415] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9272ef98-11be-40d3-b048-64b64e348a20 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.735488] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1135.735488] env[68638]: value = "task-2834427" [ 1135.735488] env[68638]: _type = "Task" [ 1135.735488] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.744501] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834427, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.791340] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.791692] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Instance network_info: |[{"id": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "address": "fa:16:3e:71:ee:44", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95615d9a-89", "ovs_interfaceid": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1135.792046] env[68638]: DEBUG oslo_concurrency.lockutils [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] Acquired lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.792278] env[68638]: DEBUG nova.network.neutron [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Refreshing network info cache for port 95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1135.793708] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:ee:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95615d9a-8948-4dd3-bc9d-c65df08cd7a8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.802858] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 
tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.806819] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.810185] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed902592-5471-4335-97ee-8ee39dc3b952 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.841317] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.841317] env[68638]: value = "task-2834428" [ 1135.841317] env[68638]: _type = "Task" [ 1135.841317] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.852547] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834428, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.859906] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52afd387-cc76-8c6b-2016-932a1efbffd9, 'name': SearchDatastore_Task, 'duration_secs': 0.009187} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.860843] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45901aa0-97d8-4b28-8ec8-5bd354fac6e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.869249] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1135.869249] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52eca3c1-8e79-29d8-04fa-486a59470077" [ 1135.869249] env[68638]: _type = "Task" [ 1135.869249] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.880818] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eca3c1-8e79-29d8-04fa-486a59470077, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.935478] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.940458] env[68638]: DEBUG nova.objects.base [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1135.941508] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffe8ba8-7fe4-4897-b9ea-cd37756be653 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.965629] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f6badfe-615d-424f-9fc3-9414893e8b9c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.972684] env[68638]: DEBUG oslo_vmware.api [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1135.972684] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a76f9b-648b-23fe-4e82-01c567f56db3" [ 1135.972684] env[68638]: _type = "Task" [ 1135.972684] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.983181] env[68638]: DEBUG oslo_vmware.api [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a76f9b-648b-23fe-4e82-01c567f56db3, 'name': SearchDatastore_Task, 'duration_secs': 0.007622} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.983474] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.983751] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.035424] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a98f0c63-d327-47b9-b0c2-f7790f1ae87d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1136.071890] env[68638]: DEBUG nova.network.neutron [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Updated VIF entry in instance network info cache for port 95615d9a-8948-4dd3-bc9d-c65df08cd7a8. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1136.072325] env[68638]: DEBUG nova.network.neutron [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Updating instance_info_cache with network_info: [{"id": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "address": "fa:16:3e:71:ee:44", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95615d9a-89", "ovs_interfaceid": "95615d9a-8948-4dd3-bc9d-c65df08cd7a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.246198] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834427, 'name': PowerOffVM_Task, 'duration_secs': 0.217926} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.246464] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.246617] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.246895] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec40ad85-e388-4abf-9452-8661f2ac6942 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.318715] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.322349] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.322585] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleting the datastore file [datastore2] 2fa9b930-c76c-4cac-a371-a6b9899dc71e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.323300] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1d4c632-2f52-4467-9b6d-2e060149d3db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.331284] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for the task: (returnval){ [ 1136.331284] env[68638]: value = "task-2834430" [ 1136.331284] env[68638]: _type = "Task" [ 1136.331284] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.340709] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.349768] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834428, 'name': CreateVM_Task, 'duration_secs': 0.402634} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.352009] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.352689] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.352857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.353194] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.353732] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3b7c16d-d17a-404c-b29a-b695c8b2f37b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.358833] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1136.358833] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a00de-d28d-b3ea-f82a-75aab34c1a89" [ 1136.358833] env[68638]: _type = "Task" [ 1136.358833] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.366951] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528a00de-d28d-b3ea-f82a-75aab34c1a89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.379656] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52eca3c1-8e79-29d8-04fa-486a59470077, 'name': SearchDatastore_Task, 'duration_secs': 0.012856} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.379953] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.380173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1136.380429] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2459204c-00ff-4901-8073-635a8fa1d039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.389827] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1136.389827] env[68638]: value = "task-2834431" [ 1136.389827] env[68638]: _type = "Task" [ 1136.389827] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.397611] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834431, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.538194] env[68638]: DEBUG nova.network.neutron [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [{"id": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "address": "fa:16:3e:07:da:cc", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22c8d069-e6", "ovs_interfaceid": "22c8d069-e6d1-4644-89d8-516903e4ef3d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.542097] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 27ff37a6-de93-4a4b-904f-a91fdb8b0aff] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1136.575552] env[68638]: DEBUG oslo_concurrency.lockutils [req-0fbaf1e2-3ba3-40d1-8cef-3b3a20f2e840 req-4463dd80-fc72-4455-85eb-dd2247108b0b service nova] Releasing lock "refresh_cache-3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.702505] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.702942] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.764096] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460ff5e3-7a35-47b0-81bb-1ee903bb13e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.774929] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af352aa-9a3f-4a3b-93dd-c9cd99dd3123 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.817031] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98e579a-993e-4661-91f1-f0a698677f34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.828926] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e76509-90f3-43d0-8e51-307087b25f34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.869266] env[68638]: DEBUG oslo_vmware.api [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Task: {'id': task-2834430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160712} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.869963] env[68638]: DEBUG nova.compute.provider_tree [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.873300] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.873300] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1136.873300] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1136.873300] env[68638]: INFO nova.compute.manager [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1136.873847] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1136.878786] env[68638]: DEBUG nova.compute.manager [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1136.878934] env[68638]: DEBUG nova.network.neutron [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1136.891648] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528a00de-d28d-b3ea-f82a-75aab34c1a89, 'name': SearchDatastore_Task, 'duration_secs': 0.010355} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.896955] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.897322] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1136.897663] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.897769] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.897995] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.898634] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d4d5baf-bb8b-4868-b5b6-94260085dee0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.904188] env[68638]: DEBUG nova.compute.manager [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event 
network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1136.904419] env[68638]: DEBUG nova.compute.manager [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing instance network info cache due to event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1136.904913] env[68638]: DEBUG oslo_concurrency.lockutils [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.904913] env[68638]: DEBUG oslo_concurrency.lockutils [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.905098] env[68638]: DEBUG nova.network.neutron [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1136.910755] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834431, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.921485] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.921741] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1136.923124] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0326c8c8-8610-4465-b0e6-3e3efff53288 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.932229] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1136.932229] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522b2291-8133-59b8-95b7-17ad8a807f4c" [ 1136.932229] env[68638]: _type = "Task" [ 1136.932229] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.941688] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522b2291-8133-59b8-95b7-17ad8a807f4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.010819] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.011090] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.011339] env[68638]: INFO nova.compute.manager [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Attaching volume f0ac565d-daae-4e70-96da-6609123bd482 to /dev/sdb [ 1137.042317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-ba07529b-e6d0-4c22-b938-c4908a7eafd7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.042632] env[68638]: DEBUG nova.objects.instance [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'migration_context' on Instance uuid ba07529b-e6d0-4c22-b938-c4908a7eafd7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.045530] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c48981a-a625-4985-8d19-36c269334e38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.049112] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4c725cb3-4ce5-4ea2-80a9-8122edd5d3e7] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1137.056446] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffc3eda-2832-48cb-850c-df6adae3bcc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.076670] env[68638]: DEBUG nova.virt.block_device [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: 
c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating existing volume attachment record: c23c8cc7-a05f-4ef0-a360-cefcb42e0979 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1137.206415] env[68638]: DEBUG nova.compute.utils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.247505] env[68638]: DEBUG nova.compute.manager [req-3bed1142-65de-484f-b1f8-4fc001bb9736 req-6a259a5b-a31d-401d-a746-0039081bbea5 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Received event network-vif-deleted-822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1137.247714] env[68638]: INFO nova.compute.manager [req-3bed1142-65de-484f-b1f8-4fc001bb9736 req-6a259a5b-a31d-401d-a746-0039081bbea5 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Neutron deleted interface 822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2; detaching it from the instance and deleting it from the info cache [ 1137.247998] env[68638]: DEBUG nova.network.neutron [req-3bed1142-65de-484f-b1f8-4fc001bb9736 req-6a259a5b-a31d-401d-a746-0039081bbea5 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.379714] env[68638]: DEBUG nova.scheduler.client.report [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.402391] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619261} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.402676] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1137.402895] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1137.403171] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97d921c7-2cb9-48dc-9338-ed35bc1763ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.410376] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1137.410376] env[68638]: value = "task-2834433" [ 1137.410376] env[68638]: _type = "Task" [ 1137.410376] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.423842] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834433, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.459082] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522b2291-8133-59b8-95b7-17ad8a807f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.020489} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.459082] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a609a06a-9348-46fe-b489-2b5c0dd12c4b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.467033] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1137.467033] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523a231e-a458-8bcb-9674-9d8b05a3ae4a" [ 1137.467033] env[68638]: _type = "Task" [ 1137.467033] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.475287] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523a231e-a458-8bcb-9674-9d8b05a3ae4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.551013] env[68638]: DEBUG nova.objects.base [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1137.551013] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c97d4a0-ed8b-4cfa-890f-82ff052a4991 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.556705] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 63669b15-2ec8-4a0d-b772-6ef7407e8ebf] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1137.576178] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e34297d4-abe1-40dd-849a-044858eb5dd8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.583416] env[68638]: DEBUG oslo_vmware.api [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1137.583416] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e971ef-cf81-cdcb-e812-13fd5f8a1d31" [ 1137.583416] env[68638]: _type = "Task" [ 1137.583416] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.594570] env[68638]: DEBUG oslo_vmware.api [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e971ef-cf81-cdcb-e812-13fd5f8a1d31, 'name': SearchDatastore_Task, 'duration_secs': 0.008214} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.594570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.648219] env[68638]: DEBUG nova.network.neutron [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.649696] env[68638]: DEBUG nova.network.neutron [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updated VIF entry in instance network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1137.651918] env[68638]: DEBUG nova.network.neutron [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.710358] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.751546] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6faed426-efba-4bca-9f38-c6839dabffe0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.762342] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79a5675-82d7-4960-b740-30f985a5ddf0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.799879] env[68638]: DEBUG nova.compute.manager [req-3bed1142-65de-484f-b1f8-4fc001bb9736 req-6a259a5b-a31d-401d-a746-0039081bbea5 service nova] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Detach interface failed, port_id=822f87a6-55e6-4bdc-bdb8-9cbe45e33ce2, reason: Instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1137.921367] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077497} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.921652] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1137.922491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8f5e02-5ab8-4438-8c33-5ebb7a8c772a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.946237] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1137.946545] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d0e8bfa-5eb3-4b03-b978-a49acc774c9a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.967542] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1137.967542] env[68638]: value = "task-2834434" [ 1137.967542] env[68638]: _type = "Task" [ 1137.967542] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.980861] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834434, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.984210] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523a231e-a458-8bcb-9674-9d8b05a3ae4a, 'name': SearchDatastore_Task, 'duration_secs': 0.012643} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.984458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.984718] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb/3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1137.984983] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3eb9333e-b77b-4d87-8131-b4dae3cfb732 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.992984] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1137.992984] env[68638]: value = "task-2834435" [ 1137.992984] env[68638]: _type = "Task" [ 1137.992984] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.001912] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834435, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.058039] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 333d88b6-2182-4e9c-9430-058e67921828] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1138.149902] env[68638]: INFO nova.compute.manager [-] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Took 1.27 seconds to deallocate network for instance. 
[ 1138.155058] env[68638]: DEBUG oslo_concurrency.lockutils [req-9372c872-243e-4dd4-b199-838d9253e8b4 req-4028cabd-c7f9-49eb-89af-078aa5a05522 service nova] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.390747] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.407s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.395317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.801s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.482323] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834434, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.505965] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834435, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.560473] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 809416da-af6c-429d-b4b2-5334768aa744] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1138.657202] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.773248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.773400] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.773610] env[68638]: INFO nova.compute.manager [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attaching volume 3b4492e5-2043-4208-a34e-33c78329a761 to /dev/sdb [ 1138.805808] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9de453-bfb3-4c3e-8914-7474fc2e558a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.813207] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f74dcbe-59f3-4762-8b49-378f2a2d7569 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.828730] env[68638]: DEBUG nova.virt.block_device [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating existing volume attachment record: f324b2d0-4dfa-40de-b95a-03f7120d00ab {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1138.871865] env[68638]: INFO nova.compute.manager [None req-8d0f59b9-38de-4126-b598-7699ac1be9c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Get console output [ 1138.872435] env[68638]: WARNING nova.virt.vmwareapi.driver [None req-8d0f59b9-38de-4126-b598-7699ac1be9c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] The console log is missing. 
Check your VSPC configuration [ 1138.959695] env[68638]: INFO nova.scheduler.client.report [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocation for migration 810edb53-f308-46ed-9eb8-5991ced4eb1c [ 1138.983049] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834434, 'name': ReconfigVM_Task, 'duration_secs': 0.731588} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.983373] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.986390] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac19ea5b-2f11-43d4-a520-c351be96d6a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.994907] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1138.994907] env[68638]: value = "task-2834436" [ 1138.994907] env[68638]: _type = "Task" [ 1138.994907] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.011136] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834435, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615156} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.014328] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb/3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1139.014574] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.014856] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834436, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.017673] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2171dac3-6a88-48b0-a48d-a2b677373e69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.024609] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1139.024609] env[68638]: value = "task-2834438" [ 1139.024609] env[68638]: _type = "Task" [ 1139.024609] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.035798] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834438, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.063839] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 90c192bd-b823-414c-b793-260eacc9904f] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1139.146541] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9877a441-3bcf-4d22-9c18-4a8e94fccb4b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.157147] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcd2f88-6de6-4e58-b26b-abece3be5e45 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.191496] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82940a34-3aed-41d6-a96c-deedc6066228 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.195127] env[68638]: DEBUG nova.compute.manager [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1139.195319] env[68638]: DEBUG nova.compute.manager [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing instance network info cache due to event network-changed-acf48d4a-b19e-47d9-a807-d221c4f0fd05. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1139.195528] env[68638]: DEBUG oslo_concurrency.lockutils [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] Acquiring lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.195767] env[68638]: DEBUG oslo_concurrency.lockutils [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] Acquired lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.195939] env[68638]: DEBUG nova.network.neutron [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Refreshing network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1139.203819] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7b4049-1b99-488c-8b1d-c68a71b4d7a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.218525] env[68638]: DEBUG nova.compute.provider_tree [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.466453] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f72bc754-ec9b-481b-90be-21bc2ee8c84e tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.470s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.509191] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834436, 'name': Rename_Task, 'duration_secs': 0.162256} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.509520] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.509846] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8ce0a0e-d9f0-463f-81cc-8be4d56b357f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.519408] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1139.519408] env[68638]: value = "task-2834440" [ 1139.519408] env[68638]: _type = "Task" [ 1139.519408] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.532340] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834440, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.538239] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834438, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082094} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.538559] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.539491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934f7aec-8406-4534-bc4c-5c36db68004b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.564669] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb/3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.565075] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f794aae5-8347-49b5-9e18-6c5ef6b6d7b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.581088] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: bb86aabd-129d-4c14-9db1-6676a5e7b9fa] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1139.591412] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1139.591412] env[68638]: value = "task-2834441" [ 1139.591412] env[68638]: _type = "Task" [ 1139.591412] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.607170] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834441, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.722830] env[68638]: DEBUG nova.scheduler.client.report [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.004026] env[68638]: DEBUG nova.network.neutron [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updated VIF entry in instance network info cache for port acf48d4a-b19e-47d9-a807-d221c4f0fd05. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.005879] env[68638]: DEBUG nova.network.neutron [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [{"id": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "address": "fa:16:3e:93:77:d3", "network": {"id": "5f368894-f202-48ed-bdd5-62442b47a35d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-2025484418-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e7777e8e5d342d68e2f54e23d125314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2907cce-d529-4809-af05-d29397bed211", "external-id": "nsx-vlan-transportzone-427", "segmentation_id": 427, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacf48d4a-b1", "ovs_interfaceid": "acf48d4a-b19e-47d9-a807-d221c4f0fd05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.030505] env[68638]: DEBUG oslo_vmware.api [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834440, 'name': PowerOnVM_Task, 'duration_secs': 0.479311} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.031029] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1140.031355] env[68638]: INFO nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Took 9.20 seconds to spawn the instance on the hypervisor. [ 1140.031643] env[68638]: DEBUG nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.032531] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5962e27c-7708-42a5-b224-0094d0f308a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.083818] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 039edcf8-7908-4be4-8bd3-0b55545b6f7b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1140.102737] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834441, 'name': ReconfigVM_Task, 'duration_secs': 0.344821} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.103043] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb/3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.103699] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fe1c20e-126b-42ee-9fe3-dca32a51e072 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.111433] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1140.111433] env[68638]: value = "task-2834442" [ 1140.111433] env[68638]: _type = "Task" [ 1140.111433] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.126671] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834442, 'name': Rename_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.507298] env[68638]: DEBUG oslo_concurrency.lockutils [req-6ab8b4f9-3184-4ea8-8ce8-bf69bb9b6cd1 req-33e6c486-bec0-423b-a135-8a257f3ac54b service nova] Releasing lock "refresh_cache-ace44b04-6dcf-4845-af4e-b28ddeebe60e" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.553797] env[68638]: INFO nova.compute.manager [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Took 15.05 seconds to build instance. [ 1140.587650] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a09c4492-34fd-4010-b547-bfb5b61f252d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1140.622762] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834442, 'name': Rename_Task, 'duration_secs': 0.170638} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.623051] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.623309] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-078cd71d-60e5-48d0-9d81-39c58476edcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.631429] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1140.631429] env[68638]: value = "task-2834443" [ 1140.631429] env[68638]: _type = "Task" [ 1140.631429] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.639273] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834443, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.736443] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.341s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.739365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.082s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.739584] env[68638]: DEBUG nova.objects.instance [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lazy-loading 'resources' on Instance uuid 2fa9b930-c76c-4cac-a371-a6b9899dc71e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1140.829080] env[68638]: DEBUG nova.compute.manager [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1140.829301] env[68638]: DEBUG nova.compute.manager [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing instance network info cache due to event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1140.829522] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.829668] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.829831] env[68638]: DEBUG nova.network.neutron [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.970999] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.971350] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.971542] env[68638]: INFO nova.compute.manager [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Shelving [ 1141.056034] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a02efe6b-655f-4e30-9b62-12c77ef1e332 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.564s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.091020] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 1b176c5d-e77c-410b-b282-b7bba65359a9] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1141.142115] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834443, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.308254] env[68638]: INFO nova.scheduler.client.report [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocation for migration a52636b4-6b32-44a1-963d-dab28c234feb [ 1141.480960] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4d8161-5d26-4e1d-a65b-3a2798fd7d31 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.491242] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d76c37f-75c9-4596-871a-d6a060634250 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.528491] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf50f55e-35b6-48f1-bef8-8fa22804e44c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.538560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4869e6e3-65fb-4208-ac8a-8b16da4e8057 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.552440] env[68638]: DEBUG nova.compute.provider_tree [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.594546] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 9c0d1c2d-88ea-40be-aef1-43b37b4dca3e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1141.600493] env[68638]: DEBUG nova.network.neutron [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updated VIF entry in instance network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.601313] env[68638]: DEBUG nova.network.neutron [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.625886] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1141.626134] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570040', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'name': 'volume-f0ac565d-daae-4e70-96da-6609123bd482', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c66805eb-fd97-4fe3-984d-8759f227d7fc', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'serial': 'f0ac565d-daae-4e70-96da-6609123bd482'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1141.627385] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee85ffcc-6d50-4ee0-8d5a-7c5f1f08b273 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.647578] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818a1fa3-776f-4cf3-a78a-c2db6265c68d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.653864] env[68638]: DEBUG oslo_vmware.api [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834443, 'name': PowerOnVM_Task, 'duration_secs': 0.729366} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.654610] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.654873] env[68638]: INFO nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Took 8.47 seconds to spawn the instance on the hypervisor. 
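Note: the recurring "Waiting for the task ... progress is N% ... completed successfully" records above are produced by oslo.vmware's task polling. A minimal caller-side sketch of that pattern, assuming the standard oslo.vmware API; the host, credentials and vm_ref below are placeholders, not values taken from this log:

from oslo_vmware import api as vmware_api

# Placeholder connection values; in Nova these come from the [vmware]
# section of nova.conf.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10,      # retries on transient API faults
    task_poll_interval=0.5)  # seconds between task status polls

def power_on(vm_ref):
    # invoke_api issues the SOAP call (here PowerOnVM_Task) and returns a
    # task reference; wait_for_task polls it until it reaches 'success'
    # (or raises on 'error'), logging progress like the records above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)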
[ 1141.655778] env[68638]: DEBUG nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.659078] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6b1056-4f0c-41d5-949c-c843738a7cfa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.681256] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-f0ac565d-daae-4e70-96da-6609123bd482/volume-f0ac565d-daae-4e70-96da-6609123bd482.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1141.682067] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cddf7248-73ef-4818-aee0-9bf0ed315d15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.706547] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1141.706547] env[68638]: value = "task-2834445" [ 1141.706547] env[68638]: _type = "Task" [ 1141.706547] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.719991] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834445, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.814861] env[68638]: DEBUG oslo_concurrency.lockutils [None req-0204cba0-fc90-4130-832c-8a037e41278e tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.061s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.985219] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1141.985601] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36277a12-91e3-4a34-ac08-01d9f570da4c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.994150] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1141.994150] env[68638]: value = "task-2834446" [ 1141.994150] env[68638]: _type = "Task" [ 1141.994150] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.003021] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834446, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.055301] env[68638]: DEBUG nova.scheduler.client.report [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1142.097538] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4edaaa5d-535a-4c63-ab44-724548a0f3eb] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1142.104091] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef1a5f67-baa3-40c4-a471-cfb9178748a0 req-bf478ec0-e60f-4899-8dd5-211a9642112d service nova] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.212728] env[68638]: INFO nova.compute.manager [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Took 16.37 seconds to build instance. [ 1142.218949] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834445, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.511951] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834446, 'name': PowerOffVM_Task, 'duration_secs': 0.243318} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.512742] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.513723] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2ff081-27a5-4df8-b7fc-7f05e96d78c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.535324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6073f58-c328-46e2-bb7e-2f3842905ee8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.561231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.822s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.583560] env[68638]: INFO nova.scheduler.client.report [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Deleted allocations for instance 2fa9b930-c76c-4cac-a371-a6b9899dc71e [ 1142.601729] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: c80895d5-1a59-4779-9da9-9aeec10bc395] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1142.718578] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1e213b80-a753-419c-8476-4cfbfce16a8b tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.883s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.718900] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834445, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.885351] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa377fc-03e5-4cc3-a69b-fcbcb6c20dbb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.892561] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Suspending the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1142.892832] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b7ea1fcf-1c87-4f8f-90cd-b3a16c4b595a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.901078] env[68638]: DEBUG oslo_vmware.api [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1142.901078] env[68638]: value = "task-2834447" [ 1142.901078] env[68638]: _type = "Task" [ 1142.901078] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.910196] env[68638]: DEBUG oslo_vmware.api [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834447, 'name': SuspendVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.006214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.006494] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.006712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.006958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.007166] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.009275] env[68638]: INFO nova.compute.manager [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Terminating instance [ 1143.048818] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1143.049109] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3ba8285c-2c6a-4f67-9d58-6a219fc9129a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.061430] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1143.061430] env[68638]: value = "task-2834448" [ 1143.061430] env[68638]: _type = "Task" [ 1143.061430] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.070623] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834448, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.091574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74b3cd48-d93a-48e2-926a-d7a231375cc1 tempest-ServersTestJSON-1866430456 tempest-ServersTestJSON-1866430456-project-member] Lock "2fa9b930-c76c-4cac-a371-a6b9899dc71e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.882s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.105407] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 7617a7b1-3b21-4d38-b090-1d35bc74637b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1143.216930] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834445, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.375504] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1143.375752] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570041', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'name': 'volume-3b4492e5-2043-4208-a34e-33c78329a761', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'serial': '3b4492e5-2043-4208-a34e-33c78329a761'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1143.376697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1803d6ce-0816-4387-904c-c8e783f8de5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.394851] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64d5c93-8579-4040-93f7-ac65198b9059 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.424211] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-3b4492e5-2043-4208-a34e-33c78329a761/volume-3b4492e5-2043-4208-a34e-33c78329a761.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.428588] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed07310-6b54-46f0-bf92-164e6244f181 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.460377] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1143.460377] env[68638]: value = "task-2834449" [ 1143.460377] env[68638]: _type = "Task" [ 1143.460377] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.460836] env[68638]: DEBUG oslo_vmware.api [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834447, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.470363] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.470608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.480935] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.512951] env[68638]: DEBUG nova.compute.manager [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1143.513359] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.514765] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae72169-2ca2-4024-93f9-1fb56ce1b8ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.526887] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.527607] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3ed9199-89f6-4845-9e0b-95f01222c6c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.539732] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1143.539732] env[68638]: value = "task-2834450" [ 1143.539732] env[68638]: _type = "Task" [ 1143.539732] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.553703] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.575588] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834448, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.609404] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.609615] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances with incomplete migration {{(pid=68638) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1143.725406] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834445, 'name': ReconfigVM_Task, 'duration_secs': 1.787151} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.726878] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-f0ac565d-daae-4e70-96da-6609123bd482/volume-f0ac565d-daae-4e70-96da-6609123bd482.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.733969] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69573267-dbcd-4db6-b760-0ff3d5ebac88 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.758602] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1143.758602] env[68638]: value = "task-2834451" [ 1143.758602] env[68638]: _type = "Task" [ 1143.758602] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.776933] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834451, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.930750] env[68638]: DEBUG oslo_vmware.api [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834447, 'name': SuspendVM_Task} progress is 58%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.972837] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1143.975580] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834449, 'name': ReconfigVM_Task, 'duration_secs': 0.433115} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.975911] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-3b4492e5-2043-4208-a34e-33c78329a761/volume-3b4492e5-2043-4208-a34e-33c78329a761.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.981065] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2aef582-042c-47de-a20a-92220749f0c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.998178] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1143.998178] env[68638]: value = "task-2834452" [ 1143.998178] env[68638]: _type = "Task" [ 1143.998178] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.007038] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.050145] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834450, 'name': PowerOffVM_Task, 'duration_secs': 0.392936} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.050422] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.050584] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.050840] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9d785e2-b801-47dc-8e66-6516d249be57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.073330] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834448, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.154995] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.155427] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.155500] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore1] ba07529b-e6d0-4c22-b938-c4908a7eafd7 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.155770] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-766f669d-4268-4a27-85f0-67fa344492bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.165514] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1144.165514] env[68638]: value = "task-2834454" [ 1144.165514] env[68638]: _type = "Task" [ 1144.165514] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.175824] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.270229] env[68638]: DEBUG oslo_vmware.api [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834451, 'name': ReconfigVM_Task, 'duration_secs': 0.235538} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.270396] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570040', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'name': 'volume-f0ac565d-daae-4e70-96da-6609123bd482', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c66805eb-fd97-4fe3-984d-8759f227d7fc', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'serial': 'f0ac565d-daae-4e70-96da-6609123bd482'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1144.429010] env[68638]: DEBUG oslo_vmware.api [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834447, 'name': SuspendVM_Task, 'duration_secs': 1.211156} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.429312] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Suspended the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1144.429495] env[68638]: DEBUG nova.compute.manager [None req-af391ab4-4ebc-42cc-918c-baeeb3752592 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.430311] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05851643-dede-4e34-ac18-e0074497ab97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.504419] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.504641] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.506237] env[68638]: INFO nova.compute.claims [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.515207] env[68638]: DEBUG oslo_vmware.api [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834452, 'name': ReconfigVM_Task, 'duration_secs': 0.175335} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.515370] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570041', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'name': 'volume-3b4492e5-2043-4208-a34e-33c78329a761', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'serial': '3b4492e5-2043-4208-a34e-33c78329a761'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1144.574261] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834448, 'name': CreateSnapshot_Task, 'duration_secs': 1.188376} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.574541] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1144.575351] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e383f8-59c6-4354-afe0-be71dd16b7e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.676474] env[68638]: DEBUG oslo_vmware.api [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336686} completed successfully. 
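The "Attached VMDK: {...}" entries log the connection_info dict that the volume attach consumed. A short sketch that pulls the interesting fields out of such a dict; the values are copied from the entry above, and the "[datastore2] ..." prefix is taken from the earlier attach entry purely for illustration:

```python
# Inspect a vmdk-type connection_info dict with the values logged above.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-570041",
        "volume_id": "3b4492e5-2043-4208-a34e-33c78329a761",
        "name": "volume-3b4492e5-2043-4208-a34e-33c78329a761",
        "access_mode": "rw",
        "encrypted": False,
    },
    "serial": "3b4492e5-2043-4208-a34e-33c78329a761",
}

data = connection_info["data"]
# Backing path of the same shape as the one logged for this instance.
vmdk_path = f"[datastore2] {data['name']}/{data['name']}.vmdk"
print(connection_info["driver_volume_type"], data["access_mode"], vmdk_path)
```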
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.676743] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.676982] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.677172] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.677350] env[68638]: INFO nova.compute.manager [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1144.677602] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.677792] env[68638]: DEBUG nova.compute.manager [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1144.677909] env[68638]: DEBUG nova.network.neutron [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1145.056199] env[68638]: DEBUG nova.compute.manager [req-8c909d47-0eb1-4d15-bb34-c86f6ea41626 req-50fefb75-7af4-4765-84d0-f34ab2e5ae96 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Received event network-vif-deleted-22c8d069-e6d1-4644-89d8-516903e4ef3d {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1145.056531] env[68638]: INFO nova.compute.manager [req-8c909d47-0eb1-4d15-bb34-c86f6ea41626 req-50fefb75-7af4-4765-84d0-f34ab2e5ae96 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Neutron deleted interface 22c8d069-e6d1-4644-89d8-516903e4ef3d; detaching it from the instance and deleting it from the info cache [ 1145.056582] env[68638]: DEBUG nova.network.neutron [req-8c909d47-0eb1-4d15-bb34-c86f6ea41626 req-50fefb75-7af4-4765-84d0-f34ab2e5ae96 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.095698] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1145.096376] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e74b0bb4-526c-4c20-a191-8a682c5bbffd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.108248] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.108632] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1145.108632] env[68638]: value = "task-2834455" [ 1145.108632] env[68638]: _type = "Task" [ 1145.108632] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.108830] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.122170] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834455, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.320597] env[68638]: DEBUG nova.objects.instance [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid c66805eb-fd97-4fe3-984d-8759f227d7fc {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.394491] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.394777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.395098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.395302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.395483] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.397710] env[68638]: INFO nova.compute.manager [None req-611d2643-8eaf-4a19-82a6-c813aae6218f 
tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Terminating instance [ 1145.485116] env[68638]: DEBUG nova.network.neutron [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.555231] env[68638]: DEBUG nova.objects.instance [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.559483] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f29d992-b924-4f2b-9f7a-a7f9f74e67b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.571335] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e374fea8-e6dd-4c72-9ca3-c5719fe48236 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.621523] env[68638]: DEBUG nova.compute.manager [req-8c909d47-0eb1-4d15-bb34-c86f6ea41626 req-50fefb75-7af4-4765-84d0-f34ab2e5ae96 service nova] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Detach interface failed, port_id=22c8d069-e6d1-4644-89d8-516903e4ef3d, reason: Instance ba07529b-e6d0-4c22-b938-c4908a7eafd7 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1145.627863] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.628614] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.629252] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.629397] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1145.629624] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.635494] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834455, 'name': CloneVM_Task} progress is 94%. 
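The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets around do_terminate_instance show work being serialized per instance UUID. A stdlib-only sketch that reproduces the same acquire/held timing messages, with threading.Lock standing in for the real locking layer:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}                      # one lock per resource name, e.g. an instance UUID
_registry_guard = threading.Lock()

@contextmanager
def instance_lock(name, caller):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released by "{caller}" :: held {time.monotonic() - t1:.3f}s')

# Usage, analogous to the terminate_instance entries above:
with instance_lock("3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb", "do_terminate_instance"):
    pass  # the terminate work would run here
```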
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.752800] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030dd84a-5f07-4355-b340-4998fe336dc6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.762146] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9241a8a5-2052-434e-be0f-eada92739571 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.799152] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e251b2-0d10-44ac-ad6d-e6b3e2502cc4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.807812] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7afce2b-125c-4b14-be3f-02f50a87ce57 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.826252] env[68638]: DEBUG nova.compute.provider_tree [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.827728] env[68638]: DEBUG oslo_concurrency.lockutils [None req-580e2be9-3d45-4184-9ac5-fd2530492c17 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.817s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.902279] env[68638]: DEBUG nova.compute.manager [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1145.902564] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1145.903522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc567b7-bf20-4361-bb4a-0c35f49d9c8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.913013] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1145.913307] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a21fbb5-123b-403e-9ca5-22033d1ce6b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.984978] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1145.985292] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1145.985501] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore1] 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1145.985809] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0549f4f-6ce8-431e-8ffd-4a792a1ead50 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.988529] env[68638]: INFO nova.compute.manager [-] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Took 1.31 seconds to deallocate network for instance. [ 1145.994759] env[68638]: DEBUG oslo_vmware.api [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1145.994759] env[68638]: value = "task-2834457" [ 1145.994759] env[68638]: _type = "Task" [ 1145.994759] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.004278] env[68638]: DEBUG oslo_vmware.api [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.061601] env[68638]: DEBUG oslo_concurrency.lockutils [None req-acba1aee-ae9e-41b5-903a-4121207fbc77 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.287s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.136300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.136782] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834455, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.331158] env[68638]: DEBUG nova.scheduler.client.report [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.497521] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.511861] env[68638]: DEBUG oslo_vmware.api [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217877} completed successfully. 
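The inventory reported for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff above encodes effective capacity per resource class as (total - reserved) * allocation_ratio, i.e. 192 VCPU, 196078 MB of RAM and 400 GB of disk for this node. A small sketch of that arithmetic over the logged inventory dict:

```python
# Inventory as logged above; effective capacity is what can be placed against.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```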
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.512378] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1146.512822] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1146.513177] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1146.513517] env[68638]: INFO nova.compute.manager [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1146.513980] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1146.514328] env[68638]: DEBUG nova.compute.manager [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1146.514523] env[68638]: DEBUG nova.network.neutron [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1146.589811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.590096] env[68638]: DEBUG oslo_concurrency.lockutils [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.628075] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834455, 'name': CloneVM_Task, 'duration_secs': 1.453804} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.628337] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Created linked-clone VM from snapshot [ 1146.629088] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697c4c53-b90c-4538-b25f-0b1c243e585f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.636581] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Uploading image a04f22d5-e7b9-474d-8313-7a4349baeb1c {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1146.662139] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1146.662139] env[68638]: value = "vm-570043" [ 1146.662139] env[68638]: _type = "VirtualMachine" [ 1146.662139] env[68638]: }. 
{{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1146.662427] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-89ab6ca7-ea92-46df-a5bd-db1b0dc9fb28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.672622] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lease: (returnval){ [ 1146.672622] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae732d-2bfc-f2ab-2b85-bd5e56306599" [ 1146.672622] env[68638]: _type = "HttpNfcLease" [ 1146.672622] env[68638]: } obtained for exporting VM: (result){ [ 1146.672622] env[68638]: value = "vm-570043" [ 1146.672622] env[68638]: _type = "VirtualMachine" [ 1146.672622] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1146.672851] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the lease: (returnval){ [ 1146.672851] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae732d-2bfc-f2ab-2b85-bd5e56306599" [ 1146.672851] env[68638]: _type = "HttpNfcLease" [ 1146.672851] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1146.680715] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1146.680715] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae732d-2bfc-f2ab-2b85-bd5e56306599" [ 1146.680715] env[68638]: _type = "HttpNfcLease" [ 1146.680715] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1146.838476] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.839218] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.842744] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.707s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.842969] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.843193] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1146.843569] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.347s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.843833] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.847982] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd84a67f-6e11-4a6f-8f1b-7f116ea24df1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.859674] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55ec3d4-28af-4b0e-af4f-bc2984dad99f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.869043] env[68638]: INFO nova.scheduler.client.report [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocations for instance ba07529b-e6d0-4c22-b938-c4908a7eafd7 [ 1146.881805] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591c24de-2bae-481a-afc5-6fcaa9b52aed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.890659] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499209d3-1b99-41d5-8838-3bcc1c97e3bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.922517] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=179471MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1146.922695] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.922887] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.084959] env[68638]: DEBUG nova.compute.manager [req-f58d2754-8977-48a5-829c-771493a83a45 req-145c8ffc-b64a-4cd5-a893-d43a0ce3bc60 service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Received event network-vif-deleted-95615d9a-8948-4dd3-bc9d-c65df08cd7a8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1147.085172] env[68638]: INFO nova.compute.manager [req-f58d2754-8977-48a5-829c-771493a83a45 req-145c8ffc-b64a-4cd5-a893-d43a0ce3bc60 service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Neutron deleted interface 95615d9a-8948-4dd3-bc9d-c65df08cd7a8; detaching it from the instance and deleting it from the info cache [ 1147.085343] env[68638]: DEBUG nova.network.neutron [req-f58d2754-8977-48a5-829c-771493a83a45 req-145c8ffc-b64a-4cd5-a893-d43a0ce3bc60 service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.092620] env[68638]: INFO nova.compute.manager [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Detaching volume f0ac565d-daae-4e70-96da-6609123bd482 [ 1147.125533] env[68638]: INFO nova.virt.block_device [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Attempting to driver detach volume f0ac565d-daae-4e70-96da-6609123bd482 from mountpoint /dev/sdb [ 1147.125795] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1147.126082] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570040', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'name': 'volume-f0ac565d-daae-4e70-96da-6609123bd482', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c66805eb-fd97-4fe3-984d-8759f227d7fc', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'serial': 'f0ac565d-daae-4e70-96da-6609123bd482'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1147.127313] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb578e97-0a56-400c-975a-866f0b521e94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.150970] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab2b0c9-54a9-4e17-b484-05dcf7f4c344 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.159274] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d71fd85-7359-4a17-a339-ccf67b32832d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.184427] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ffe0e9-2df6-4dde-a276-7c3ba6594514 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.189155] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.189412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.202904] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] The volume has not been displaced from its original location: [datastore1] volume-f0ac565d-daae-4e70-96da-6609123bd482/volume-f0ac565d-daae-4e70-96da-6609123bd482.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1147.208245] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1147.208552] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1147.208552] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae732d-2bfc-f2ab-2b85-bd5e56306599" [ 1147.208552] env[68638]: _type = "HttpNfcLease" [ 1147.208552] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1147.210261] env[68638]: DEBUG nova.compute.utils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.211519] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4b2908c-8f05-4e1d-ae8a-67a43ef69ef5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.224386] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1147.224386] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ae732d-2bfc-f2ab-2b85-bd5e56306599" [ 1147.224386] env[68638]: _type = "HttpNfcLease" [ 1147.224386] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1147.225980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 0.036s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.227154] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fcf463-78c4-4cf9-9bc7-9dfbd2538740 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.237715] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1147.237956] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk for reading. 
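Once the HttpNfcLease above reports "is ready", the export handle streams the VMDK over HTTPS from the URL found in the lease info. A bare-bones, happy-path sketch of that read side, using stdlib urllib, a placeholder URL, and no lease keep-alive, so it only shows the shape of the transfer:

```python
import ssl
import urllib.request

# Placeholder URL of the same form as the one in the log; not a live endpoint.
VMDK_URL = "https://esx-host.example/nfc/<lease-id>/disk-0.vmdk"

def stream_vmdk(url, out_path, chunk_size=1024 * 1024, verify_tls=True):
    """Stream an exported VMDK to disk in fixed-size chunks."""
    ctx = ssl.create_default_context()
    if not verify_tls:                      # lab setups often use self-signed certs
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
    with urllib.request.urlopen(url, context=ctx) as resp, open(out_path, "wb") as out:
        while True:
            chunk = resp.read(chunk_size)
            if not chunk:
                break
            out.write(chunk)

# stream_vmdk(VMDK_URL, "disk-0.vmdk")     # a real export would also renew the lease periodically
```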
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1147.240864] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1147.240864] env[68638]: value = "task-2834459" [ 1147.240864] env[68638]: _type = "Task" [ 1147.240864] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.302349] env[68638]: DEBUG nova.network.neutron [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.312485] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.348569] env[68638]: DEBUG nova.compute.utils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1147.350051] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1147.350233] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1147.379807] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-70f578ac-685a-4ff1-9d7c-3517050a3816 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.389076] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b622a45e-c256-4411-9df8-d56dbc7a7fa3 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "ba07529b-e6d0-4c22-b938-c4908a7eafd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.382s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.417191] env[68638]: DEBUG nova.policy [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7569a0fd95c644d38ef18de41870bde4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fdd5447a0546b7b0fe2ed9ea0efc73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1147.589817] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2e05cb4-7c05-4f29-a350-6afc1bfbda8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.600108] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8736793-1398-4546-9488-afae718e1082 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.639503] env[68638]: DEBUG nova.compute.manager [req-f58d2754-8977-48a5-829c-771493a83a45 req-145c8ffc-b64a-4cd5-a893-d43a0ce3bc60 service nova] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Detach interface failed, port_id=95615d9a-8948-4dd3-bc9d-c65df08cd7a8, reason: Instance 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1147.682647] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Successfully created port: b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.754280] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834459, 'name': ReconfigVM_Task, 'duration_secs': 0.300248} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.756252] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1147.764374] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c23b3d5-6b48-48e6-a2b3-e2168e31b8e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.783656] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1147.783656] env[68638]: value = "task-2834460" [ 1147.783656] env[68638]: _type = "Task" [ 1147.783656] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.795394] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834460, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.807024] env[68638]: INFO nova.compute.manager [-] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Took 1.29 seconds to deallocate network for instance. [ 1147.853678] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1147.959939] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.960376] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.960552] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.960779] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7d99d946-f2df-4d31-911f-ac479849b901 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.960997] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance cc2e9758-45ee-4e94-ad74-ba7d6c85f06d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961205] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance ace44b04-6dcf-4845-af4e-b28ddeebe60e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961368] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961587] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance c66805eb-fd97-4fe3-984d-8759f227d7fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961707] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a576ba6f-5e3b-4408-b95d-2084a072ec12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961820] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.961932] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e0903192-4fa7-437a-9023-33e8e65124e3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.962058] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 25c35c36-71c9-48cd-b7e4-6293eef890e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.962173] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.962283] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 00a22fef-5d10-4413-a9aa-070a1a863cdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1147.962498] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1147.962728] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1148.156430] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d2f9b-7fe9-4bae-a15c-d153d71c31ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.166144] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641b904e-a1e5-4659-8298-b6f9caad113d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.200938] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f0d9a7-a000-4e25-a5da-d20c1c25dcf9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.210231] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51494b6c-e4ec-42b9-86c5-2bfb28d7b4d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.227341] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.276012] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.276505] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.276623] env[68638]: INFO nova.compute.manager [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attaching volume e4ecec83-473b-4605-9037-89f5aa298624 to /dev/sdc [ 1148.295659] env[68638]: DEBUG oslo_vmware.api [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 
tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834460, 'name': ReconfigVM_Task, 'duration_secs': 0.178208} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.296065] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570040', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'name': 'volume-f0ac565d-daae-4e70-96da-6609123bd482', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c66805eb-fd97-4fe3-984d-8759f227d7fc', 'attached_at': '', 'detached_at': '', 'volume_id': 'f0ac565d-daae-4e70-96da-6609123bd482', 'serial': 'f0ac565d-daae-4e70-96da-6609123bd482'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1148.313235] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.316446] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b70cf0-fa40-4652-bf00-5b3b4961f357 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.326074] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdfc437-4e97-404d-8da1-b1715bc7bd41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.342682] env[68638]: DEBUG nova.virt.block_device [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating existing volume attachment record: 3898f197-3065-453e-b877-65e0f5dc0fa4 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1148.730669] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.848539] env[68638]: DEBUG nova.objects.instance [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid c66805eb-fd97-4fe3-984d-8759f227d7fc {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.866300] env[68638]: 
DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1148.898851] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.899267] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.899624] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.899889] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.900199] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.900374] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.900638] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.900891] env[68638]: DEBUG nova.virt.hardware [None 
req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.901177] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.901492] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.901723] env[68638]: DEBUG nova.virt.hardware [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.902778] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cf9b39-b3d5-49d0-bece-26c7b83f9ab7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.913599] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e0a0e5-4ed1-410a-a0fd-d80e750755bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.130787] env[68638]: DEBUG nova.compute.manager [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Received event network-vif-plugged-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1149.130787] env[68638]: DEBUG oslo_concurrency.lockutils [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.130787] env[68638]: DEBUG oslo_concurrency.lockutils [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.130787] env[68638]: DEBUG oslo_concurrency.lockutils [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.130787] env[68638]: DEBUG nova.compute.manager [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 
req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] No waiting events found dispatching network-vif-plugged-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.130787] env[68638]: WARNING nova.compute.manager [req-e63a1dfb-49e0-42c9-8600-84cb409200b0 req-39963f23-6bcc-4e4e-a910-0bd929479651 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Received unexpected event network-vif-plugged-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 for instance with vm_state building and task_state spawning. [ 1149.235926] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1149.236120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.313s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.236503] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.923s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.236717] env[68638]: DEBUG nova.objects.instance [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'resources' on Instance uuid 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.306022] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Successfully updated port: b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.811956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.812248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.812359] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1149.856551] env[68638]: DEBUG oslo_concurrency.lockutils [None req-03d89a1e-a80a-4169-9982-3980eb1bd41d tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.929198] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc04883-b450-4f8c-b2a6-41bd9f8369e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.938297] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9847a705-2796-4749-951b-dea32ad5616a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.968932] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9c1003-7381-4678-b335-f5d17c390ba7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.976956] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6008b8b8-ab6e-4824-ab51-96ee44dbc8d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.992266] env[68638]: DEBUG nova.compute.provider_tree [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.345637] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1150.495478] env[68638]: DEBUG nova.scheduler.client.report [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.533603] env[68638]: DEBUG nova.network.neutron [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.882497] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.882890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.882937] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] 
Acquiring lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.883134] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.883315] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.885576] env[68638]: INFO nova.compute.manager [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Terminating instance [ 1151.000406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.020100] env[68638]: INFO nova.scheduler.client.report [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb [ 1151.035832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.036163] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Instance network_info: |[{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1151.036588] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:46:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1151.044237] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1151.044473] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1151.044704] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c703206-46aa-4655-a722-8a93c9529490 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.066638] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1151.066638] env[68638]: value = "task-2834463" [ 1151.066638] env[68638]: _type = "Task" [ 1151.066638] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.075914] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834463, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.160969] env[68638]: DEBUG nova.compute.manager [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Received event network-changed-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1151.161284] env[68638]: DEBUG nova.compute.manager [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Refreshing instance network info cache due to event network-changed-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1151.161638] env[68638]: DEBUG oslo_concurrency.lockutils [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] Acquiring lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.161905] env[68638]: DEBUG oslo_concurrency.lockutils [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] Acquired lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.162226] env[68638]: DEBUG nova.network.neutron [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Refreshing network info cache for port b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.389956] env[68638]: DEBUG nova.compute.manager [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1151.390210] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1151.391267] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6870da8e-d256-41b3-85b2-369c86094bee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.399674] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1151.399995] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0f790f3-16ea-426c-9782-ea74f00b47ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.407621] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1151.407621] env[68638]: value = "task-2834464" [ 1151.407621] env[68638]: _type = "Task" [ 1151.407621] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.416831] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834464, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.527317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-611d2643-8eaf-4a19-82a6-c813aae6218f tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.132s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.577905] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834463, 'name': CreateVM_Task, 'duration_secs': 0.382664} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.578114] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.578826] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.578992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.579366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.579629] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-079e9542-46d9-4e13-b3e8-bdd33bf1478b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.585553] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1151.585553] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5293235e-9a16-03ac-3f8f-b6185f5cf378" [ 1151.585553] env[68638]: _type = "Task" [ 1151.585553] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.597616] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5293235e-9a16-03ac-3f8f-b6185f5cf378, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.922718] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834464, 'name': PowerOffVM_Task, 'duration_secs': 0.245435} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.924265] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.924265] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.924265] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af0ecec9-9737-4fcc-b25a-37b4562db481 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.953021] env[68638]: DEBUG nova.network.neutron [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updated VIF entry in instance network info cache for port b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.953259] env[68638]: DEBUG nova.network.neutron [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.004139] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.004444] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.004675] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleting the datastore file [datastore2] c66805eb-fd97-4fe3-984d-8759f227d7fc {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.005020] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e56ec14e-61fd-4622-9dc6-512899d9c8e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.015863] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1152.015863] env[68638]: value = "task-2834466" [ 1152.015863] env[68638]: _type = "Task" [ 1152.015863] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.024723] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.101623] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5293235e-9a16-03ac-3f8f-b6185f5cf378, 'name': SearchDatastore_Task, 'duration_secs': 0.013772} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.102164] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.102568] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.102973] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.103279] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.103611] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.104029] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2c553a6-68d8-436d-8e26-0f2854f6a235 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.125497] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.125726] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.126501] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fe02383-6210-45a7-a248-1c9d7f4c5271 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.132591] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1152.132591] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d5131f-ad52-b9ce-39cb-206d70e62f1d" [ 1152.132591] env[68638]: _type = "Task" [ 1152.132591] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.141231] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d5131f-ad52-b9ce-39cb-206d70e62f1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.456902] env[68638]: DEBUG oslo_concurrency.lockutils [req-10a020b3-e46f-4509-9965-b8ad6b5e9f55 req-3555e1e1-0872-404f-9164-90357de034b4 service nova] Releasing lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.468730] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.468948] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.527285] env[68638]: DEBUG oslo_vmware.api [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26163} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.527547] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.527728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.527902] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.528113] env[68638]: INFO nova.compute.manager [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1152.528359] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.528555] env[68638]: DEBUG nova.compute.manager [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.528652] env[68638]: DEBUG nova.network.neutron [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.645358] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d5131f-ad52-b9ce-39cb-206d70e62f1d, 'name': SearchDatastore_Task, 'duration_secs': 0.013904} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.646178] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94b96c2d-e27d-46e2-9d4f-86608cc4e68c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.653379] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1152.653379] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528f9cf2-d505-7386-b33b-fe7331280dd9" [ 1152.653379] env[68638]: _type = "Task" [ 1152.653379] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.663288] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528f9cf2-d505-7386-b33b-fe7331280dd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.895175] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Volume attach. Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1152.895451] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570044', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'name': 'volume-e4ecec83-473b-4605-9037-89f5aa298624', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'serial': 'e4ecec83-473b-4605-9037-89f5aa298624'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1152.896386] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def8b8ea-8be3-404e-bf5e-18c164ccf01e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.914423] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d705f54b-635f-47d9-a28b-cbacc7a9f212 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.945122] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 
volume-e4ecec83-473b-4605-9037-89f5aa298624/volume-e4ecec83-473b-4605-9037-89f5aa298624.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1152.945531] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c4e25c0-b01a-4fe5-9a38-0a276a2f6799 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.968893] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1152.968893] env[68638]: value = "task-2834467" [ 1152.968893] env[68638]: _type = "Task" [ 1152.968893] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.973151] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1152.982383] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834467, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.164695] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528f9cf2-d505-7386-b33b-fe7331280dd9, 'name': SearchDatastore_Task, 'duration_secs': 0.020196} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.165103] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.165328] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.165622] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d65302c-0003-4c4a-81fa-06e678b0c29c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.175266] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1153.175266] env[68638]: value = "task-2834468" [ 1153.175266] env[68638]: _type = "Task" [ 1153.175266] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.185255] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834468, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.191802] env[68638]: DEBUG nova.compute.manager [req-d97304d2-8743-4899-b800-3c866fee9caf req-01833093-b86e-464d-8cf4-cbbc42472520 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Received event network-vif-deleted-2ee11caa-7a55-450e-b8b2-af4bc1c60e64 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1153.191802] env[68638]: INFO nova.compute.manager [req-d97304d2-8743-4899-b800-3c866fee9caf req-01833093-b86e-464d-8cf4-cbbc42472520 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Neutron deleted interface 2ee11caa-7a55-450e-b8b2-af4bc1c60e64; detaching it from the instance and deleting it from the info cache [ 1153.191802] env[68638]: DEBUG nova.network.neutron [req-d97304d2-8743-4899-b800-3c866fee9caf req-01833093-b86e-464d-8cf4-cbbc42472520 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.482482] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834467, 'name': ReconfigVM_Task, 'duration_secs': 0.440332} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.485292] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-e4ecec83-473b-4605-9037-89f5aa298624/volume-e4ecec83-473b-4605-9037-89f5aa298624.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.491091] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d58da164-c900-44f0-a797-aff3a7686c4e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.504789] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.505186] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.506921] env[68638]: INFO nova.compute.claims [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.511558] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1153.511558] env[68638]: value = "task-2834469" [ 1153.511558] env[68638]: _type = "Task" [ 1153.511558] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.517063] env[68638]: DEBUG nova.network.neutron [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.522603] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834469, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.693994] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834468, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.695408] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8924bbba-9a30-4585-b893-74d766daecf5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.708761] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f8309-3d5d-4f84-bb04-c06485a24e43 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.754615] env[68638]: DEBUG nova.compute.manager [req-d97304d2-8743-4899-b800-3c866fee9caf req-01833093-b86e-464d-8cf4-cbbc42472520 service nova] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Detach interface failed, port_id=2ee11caa-7a55-450e-b8b2-af4bc1c60e64, reason: Instance c66805eb-fd97-4fe3-984d-8759f227d7fc could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1154.025354] env[68638]: INFO nova.compute.manager [-] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Took 1.50 seconds to deallocate network for instance. [ 1154.025354] env[68638]: DEBUG oslo_vmware.api [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834469, 'name': ReconfigVM_Task, 'duration_secs': 0.254802} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.027390] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570044', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'name': 'volume-e4ecec83-473b-4605-9037-89f5aa298624', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'serial': 'e4ecec83-473b-4605-9037-89f5aa298624'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1154.187461] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548772} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.187753] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.187983] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.188271] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d000e94-548f-4a77-8e2e-f5cc229b1854 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.196023] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1154.196023] env[68638]: value = "task-2834470" [ 1154.196023] env[68638]: _type = "Task" [ 1154.196023] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.204629] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.534881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.709308] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127066} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.709693] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.710921] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c21f0fb-ba4c-4715-a290-b1c829bbdaee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.716045] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef14a264-5580-455d-b962-04bb0fee1ed4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.739300] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7c9612-bd5c-4e37-ab3e-b7f7547ea839 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.753146] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.753484] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3894f4c7-31c3-46c9-9209-991dc91c00a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.799359] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4a6685-be33-4523-8b11-51989ce72a32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.802219] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1154.802219] env[68638]: value = "task-2834471" [ 1154.802219] env[68638]: _type = "Task" [ 1154.802219] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.809858] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444235ee-bf99-42d8-baad-d1484f0a18f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.817069] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834471, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.827720] env[68638]: DEBUG nova.compute.provider_tree [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.006499] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1155.007507] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084c885c-628b-416e-bd1e-a1d0284cc6f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.014738] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1155.014904] env[68638]: ERROR oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk due to incomplete transfer. [ 1155.015190] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1ca349e1-c80e-48c5-8018-cabcdf3707fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.023977] env[68638]: DEBUG oslo_vmware.rw_handles [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52961ee4-7ef9-76ba-2e23-68352ef836a3/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1155.024188] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Uploaded image a04f22d5-e7b9-474d-8313-7a4349baeb1c to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1155.026730] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1155.027036] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-422e4210-cad7-4fc9-808a-89b03c77aa40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.033589] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1155.033589] env[68638]: value = "task-2834472" [ 1155.033589] env[68638]: _type = "Task" [ 1155.033589] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.042810] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834472, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.069220] env[68638]: DEBUG nova.objects.instance [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.314211] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834471, 'name': ReconfigVM_Task, 'duration_secs': 0.3338} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.314566] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.315308] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfe85056-2673-4f9f-8e48-c9deda94cc6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.323118] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1155.323118] env[68638]: value = "task-2834473" [ 1155.323118] env[68638]: _type = "Task" [ 1155.323118] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.332751] env[68638]: DEBUG nova.scheduler.client.report [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.336057] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834473, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.543760] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834472, 'name': Destroy_Task, 'duration_secs': 0.35992} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.543972] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Destroyed the VM [ 1155.544230] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1155.544476] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-96cbe2e2-a35f-4d24-9685-caec13217252 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.551571] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1155.551571] env[68638]: value = "task-2834474" [ 1155.551571] env[68638]: _type = "Task" [ 1155.551571] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.560623] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834474, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.574101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fd11bf2a-876d-446c-a959-53e8ab3176a3 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.298s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.833743] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834473, 'name': Rename_Task, 'duration_secs': 0.165421} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.834014] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.834262] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a9f840bc-6c8f-421f-b8f9-235dd586c7b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.837645] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.838181] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1155.841711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.307s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.841928] env[68638]: DEBUG nova.objects.instance [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'resources' on Instance uuid c66805eb-fd97-4fe3-984d-8759f227d7fc {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.842894] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1155.842894] env[68638]: value = "task-2834475" [ 1155.842894] env[68638]: _type = "Task" [ 1155.842894] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.852274] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834475, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.902105] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.902329] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.061927] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834474, 'name': RemoveSnapshot_Task, 'duration_secs': 0.382313} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.062334] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1156.062433] env[68638]: DEBUG nova.compute.manager [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.063268] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b819d966-f9a2-4d78-ba72-bc152c9dec06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.344849] env[68638]: DEBUG nova.compute.utils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1156.349269] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1156.349636] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1156.362463] env[68638]: DEBUG oslo_vmware.api [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834475, 'name': PowerOnVM_Task, 'duration_secs': 0.486094} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.362768] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.362997] env[68638]: INFO nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Took 7.50 seconds to spawn the instance on the hypervisor. [ 1156.363223] env[68638]: DEBUG nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.364066] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b73732-d9a6-45dc-8576-e98bd1e220af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.402059] env[68638]: DEBUG nova.policy [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '815b8ce8a95a4f76a28506fe20117298', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae89c3992e04141bf24be9d9e84e302', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1156.406518] env[68638]: INFO nova.compute.manager [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Detaching volume 3b4492e5-2043-4208-a34e-33c78329a761 [ 1156.443804] env[68638]: INFO nova.virt.block_device [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attempting to driver detach volume 3b4492e5-2043-4208-a34e-33c78329a761 from mountpoint /dev/sdb [ 1156.444057] env[68638]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1156.444337] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570041', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'name': 'volume-3b4492e5-2043-4208-a34e-33c78329a761', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'serial': '3b4492e5-2043-4208-a34e-33c78329a761'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1156.445378] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9966f7d6-9827-47fd-90a6-4fd7a6ad9bff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.477215] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3291cc-57de-40e2-bdaa-625badf050fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.485302] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5176d512-f8f2-4f6f-8369-3ab2d8007c46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.514155] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f240460d-a7f8-4dde-ab8f-c2bf8451127c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.530641] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] The volume has not been displaced from its original location: [datastore2] volume-3b4492e5-2043-4208-a34e-33c78329a761/volume-3b4492e5-2043-4208-a34e-33c78329a761.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1156.535909] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1156.538547] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55b1156e-d529-435f-8cb4-fafc9bc98038 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.556738] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1156.556738] env[68638]: value = "task-2834476" [ 1156.556738] env[68638]: _type = "Task" [ 1156.556738] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.567690] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834476, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.575680] env[68638]: INFO nova.compute.manager [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Shelve offloading [ 1156.596996] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d3e2c4-64ed-4e16-bb0a-fafe21377d6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.605578] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb8345-03c9-4de3-98d9-1dec7f9e5033 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.639090] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a067f7e-4d97-4137-bba9-3fba855dddbe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.647334] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9a540e-4117-45b9-b81b-9340bc8faed2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.663691] env[68638]: DEBUG nova.compute.provider_tree [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.669637] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 
2e788c4c-f6d1-4001-9389-1068887d205f] Successfully created port: c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1156.850678] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1156.889685] env[68638]: INFO nova.compute.manager [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Took 12.41 seconds to build instance. [ 1157.067793] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.079571] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1157.079914] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9242241d-bf52-4cc3-8093-d151904aacd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.087760] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1157.087760] env[68638]: value = "task-2834477" [ 1157.087760] env[68638]: _type = "Task" [ 1157.087760] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.099358] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1157.099590] env[68638]: DEBUG nova.compute.manager [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.100411] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd6ad54-8981-4518-8515-5c4012c90cd3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.106874] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.107076] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.107270] env[68638]: DEBUG nova.network.neutron [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1157.166609] env[68638]: DEBUG nova.scheduler.client.report [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.392856] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6ac3dc1-8058-4955-abd9-b457f68066ec tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.922s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.571574] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f 
tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834476, 'name': ReconfigVM_Task, 'duration_secs': 0.593087} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.571957] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1157.579313] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd81d801-0360-420d-8ec1-b10c9f9f07f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.602165] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1157.602165] env[68638]: value = "task-2834478" [ 1157.602165] env[68638]: _type = "Task" [ 1157.602165] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.613885] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.672154] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.692882] env[68638]: INFO nova.scheduler.client.report [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted allocations for instance c66805eb-fd97-4fe3-984d-8759f227d7fc [ 1157.814293] env[68638]: DEBUG nova.network.neutron [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.862616] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1157.886676] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1157.887671] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1157.887671] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1157.887671] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1157.887671] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1157.887671] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1157.887912] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1157.887912] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1157.888091] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1157.888281] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1157.888454] env[68638]: DEBUG nova.virt.hardware [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1157.889673] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389113e2-2593-40a2-bf13-5c31cecca6e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.898472] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef071b6-5c40-48c1-a203-eefe020fad32 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.018360] env[68638]: DEBUG nova.compute.manager [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Received event network-vif-plugged-c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1158.018584] env[68638]: DEBUG oslo_concurrency.lockutils [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.018798] env[68638]: DEBUG oslo_concurrency.lockutils [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.018969] env[68638]: DEBUG 
oslo_concurrency.lockutils [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.019212] env[68638]: DEBUG nova.compute.manager [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] No waiting events found dispatching network-vif-plugged-c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1158.019318] env[68638]: WARNING nova.compute.manager [req-32a189b7-e668-4b6b-b32a-d537f8ac2870 req-fd7d9841-8971-4baa-b7df-2019576b9015 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Received unexpected event network-vif-plugged-c9f8dd22-b056-4864-91c0-671a170e81bd for instance with vm_state building and task_state spawning. [ 1158.097469] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Successfully updated port: c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1158.113274] env[68638]: DEBUG oslo_vmware.api [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834478, 'name': ReconfigVM_Task, 'duration_secs': 0.164518} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.113591] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570041', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'name': 'volume-3b4492e5-2043-4208-a34e-33c78329a761', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': '3b4492e5-2043-4208-a34e-33c78329a761', 'serial': '3b4492e5-2043-4208-a34e-33c78329a761'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1158.200591] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ab0dfe4-8a1d-49af-a128-f4245da29068 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "c66805eb-fd97-4fe3-984d-8759f227d7fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.318s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.317818] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.601543] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.601714] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.601901] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.652402] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1158.653122] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a55bc65-5805-41b4-bbcd-ccbc0b67bdbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.660156] env[68638]: DEBUG nova.objects.instance [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.664382] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1158.664858] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebb94345-7410-4064-a1a8-07712dfd48ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.799501] env[68638]: DEBUG nova.compute.manager [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1159.136921] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1159.268317] env[68638]: DEBUG nova.network.neutron [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.323790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.324089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.670664] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4084b182-fca2-4979-9827-ccaa16da422f tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.768s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.772072] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.772072] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 
2e788c4c-f6d1-4001-9389-1068887d205f] Instance network_info: |[{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1159.772072] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:2d:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9f8dd22-b056-4864-91c0-671a170e81bd', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.779615] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.779880] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.780076] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-061ae10e-0df5-4608-a335-8a4c48d0ac15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.803490] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.803490] env[68638]: value = "task-2834480" [ 1159.803490] env[68638]: _type = "Task" [ 1159.803490] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.811567] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834480, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.829372] env[68638]: INFO nova.compute.claims [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1159.960852] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.962072] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.962072] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleting the datastore file [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.962072] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b4647f9-f31b-4de4-93cc-12c291e1d4d9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.970719] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1159.970719] env[68638]: value = "task-2834481" [ 1159.970719] env[68638]: _type = "Task" [ 1159.970719] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.000988] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.048061] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Received event network-changed-c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1160.048345] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Refreshing instance network info cache due to event network-changed-c9f8dd22-b056-4864-91c0-671a170e81bd. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1160.048572] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Acquiring lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.048794] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Acquired lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.049033] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Refreshing network info cache for port c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1160.157655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.157980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.218157] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.218157] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.313557] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834480, 'name': CreateVM_Task, 'duration_secs': 0.439088} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.313744] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1160.314510] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.314610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.314945] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1160.315236] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c1cd359-9672-450f-a69b-13055b6cee8c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.321041] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1160.321041] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f0b2b6-d384-8066-c81a-10226322cb20" [ 1160.321041] env[68638]: _type = "Task" [ 1160.321041] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.328932] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0b2b6-d384-8066-c81a-10226322cb20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.334505] env[68638]: INFO nova.compute.resource_tracker [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating resource usage from migration 3233046a-fd8f-452a-bd4e-9ee9e4fa379f [ 1160.482656] env[68638]: DEBUG oslo_vmware.api [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197374} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.482922] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.483127] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.483312] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.540228] env[68638]: INFO nova.scheduler.client.report [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted allocations for instance a576ba6f-5e3b-4408-b95d-2084a072ec12 [ 1160.544914] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7c0f69-832f-4e2a-8f8a-e8c454eff507 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.556906] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c87a24-5d62-4086-9863-43094151540d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.593844] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e5e5f5-a704-4639-8363-92517e0ac6ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.604054] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f75033-14df-47fd-8c6e-70176a587952 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.622757] env[68638]: DEBUG nova.compute.provider_tree [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.660426] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1160.720739] env[68638]: INFO nova.compute.manager [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Detaching volume e4ecec83-473b-4605-9037-89f5aa298624 [ 1160.758918] env[68638]: INFO nova.virt.block_device [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Attempting to driver detach volume e4ecec83-473b-4605-9037-89f5aa298624 from mountpoint /dev/sdc [ 1160.759183] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Volume detach. Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1160.759411] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570044', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'name': 'volume-e4ecec83-473b-4605-9037-89f5aa298624', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'serial': 'e4ecec83-473b-4605-9037-89f5aa298624'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1160.760254] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3ea03e-1bc3-4f90-bed2-997d67ef2d8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.784529] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aed0780-b9f1-4597-8984-8738a2066945 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.792250] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cac94c1-9762-4464-8e62-06b921944e22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.813897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fafa726-7243-4b5a-b322-9eb7253061d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.817079] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updated VIF entry in instance network info cache for port c9f8dd22-b056-4864-91c0-671a170e81bd. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.817628] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.835630] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] The volume has not been displaced from its original location: [datastore2] volume-e4ecec83-473b-4605-9037-89f5aa298624/volume-e4ecec83-473b-4605-9037-89f5aa298624.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1160.840882] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1160.841918] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c2611d9-a259-428d-95a4-94aefcffa034 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.858699] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f0b2b6-d384-8066-c81a-10226322cb20, 'name': SearchDatastore_Task, 'duration_secs': 0.011188} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.859502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.859691] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.859937] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.860098] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.860282] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.860542] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3cf2a41-a36e-41b9-9844-1dae4bef6233 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.866078] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1160.866078] env[68638]: value = "task-2834482" [ 1160.866078] env[68638]: _type = "Task" [ 1160.866078] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.871731] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.871902] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1160.875389] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73a97b71-03ac-4e92-9d65-80717408e5cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.877545] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834482, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.881363] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1160.881363] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527ca0a6-ae6d-7a4a-5812-0451c892c952" [ 1160.881363] env[68638]: _type = "Task" [ 1160.881363] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.891107] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527ca0a6-ae6d-7a4a-5812-0451c892c952, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.050799] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.125686] env[68638]: DEBUG nova.scheduler.client.report [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.183523] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.321233] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Releasing lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.321233] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-vif-unplugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.321233] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.321233] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.321589] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.321589] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] No waiting events found dispatching network-vif-unplugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.322203] env[68638]: WARNING nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received unexpected event network-vif-unplugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c for instance with vm_state shelved and task_state shelving_offloading. [ 1161.322203] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.322203] env[68638]: DEBUG nova.compute.manager [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing instance network info cache due to event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1161.322461] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.322461] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.322573] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.377014] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834482, 'name': ReconfigVM_Task, 'duration_secs': 0.463289} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.377360] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1161.381982] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81f30aa2-3170-4e8d-aff4-ce4c29a066b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.400433] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527ca0a6-ae6d-7a4a-5812-0451c892c952, 'name': SearchDatastore_Task, 'duration_secs': 0.01151} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.402158] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1161.402158] env[68638]: value = "task-2834483" [ 1161.402158] env[68638]: _type = "Task" [ 1161.402158] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.402359] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7221fe6-4b3f-4c85-921e-e42573b4c746 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.409911] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1161.409911] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b546b2-0212-f228-3396-908de6b25170" [ 1161.409911] env[68638]: _type = "Task" [ 1161.409911] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.412908] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834483, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.420264] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b546b2-0212-f228-3396-908de6b25170, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.630857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.307s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.631221] env[68638]: INFO nova.compute.manager [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Migrating [ 1161.637847] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.587s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.638132] env[68638]: DEBUG nova.objects.instance [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'resources' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.915962] env[68638]: DEBUG oslo_vmware.api [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834483, 'name': ReconfigVM_Task, 'duration_secs': 0.15827} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.919304] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570044', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'name': 'volume-e4ecec83-473b-4605-9037-89f5aa298624', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1', 'attached_at': '', 'detached_at': '', 'volume_id': 'e4ecec83-473b-4605-9037-89f5aa298624', 'serial': 'e4ecec83-473b-4605-9037-89f5aa298624'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1161.927028] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52b546b2-0212-f228-3396-908de6b25170, 'name': SearchDatastore_Task, 'duration_secs': 0.013257} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.927317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.927581] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1161.927832] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-581039d1-c662-474d-a7ab-fca640748ef3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.937751] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1161.937751] env[68638]: value = "task-2834484" [ 1161.937751] env[68638]: _type = "Task" [ 1161.937751] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.945814] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.016149] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updated VIF entry in instance network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.016521] env[68638]: DEBUG nova.network.neutron [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2725817f-dd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.146628] env[68638]: DEBUG nova.objects.instance [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'numa_topology' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.147994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.148207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.148391] env[68638]: DEBUG nova.network.neutron [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1162.191696] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.457538] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834484, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.463136] env[68638]: DEBUG nova.objects.instance [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'flavor' on Instance uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.519518] env[68638]: DEBUG oslo_concurrency.lockutils [req-d7e8d5c8-c4a6-4918-b509-8fce8786e7b9 req-167017dc-f222-4fb9-ae3a-1912a4dcf9da service nova] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.652693] env[68638]: DEBUG nova.objects.base [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1162.835525] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdddff0e-c65d-44ab-a1b6-59585874db88 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.841952] env[68638]: DEBUG nova.network.neutron [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.848091] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-97a02e04-6f4d-456e-a7a4-4fb89d2f9f2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.881224] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e4a6b4-a270-49a0-8cf0-f5e6bfe4890a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.889688] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf5fd96-121e-4906-924c-5c368773b601 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.903773] env[68638]: DEBUG nova.compute.provider_tree [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.948932] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604069} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.949215] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.949432] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.949710] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8a057f6-4dbc-4856-bf6c-c854f7c2eaab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.956779] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1162.956779] env[68638]: value = "task-2834485" [ 1162.956779] env[68638]: _type = "Task" [ 1162.956779] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.964260] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834485, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.054901] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.055192] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.055432] env[68638]: INFO nova.compute.manager [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Rebooting instance [ 1163.345317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.406682] env[68638]: DEBUG nova.scheduler.client.report [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.467376] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085224} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.467658] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.468509] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b889b8-49f1-4ac3-8e2c-c8b2408c7b66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.470958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c1bb43cb-f66a-4d5e-b82f-ea2ef3701207 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.253s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.493672] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.494553] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b0b2c6b-ef7b-43bf-ab03-b2da97bab824 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.514681] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1163.514681] env[68638]: value = "task-2834486" [ 1163.514681] env[68638]: _type = "Task" [ 1163.514681] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.522736] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834486, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.573831] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.574146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.574408] env[68638]: DEBUG nova.network.neutron [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.911706] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.274s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.914478] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.732s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.916185] env[68638]: INFO nova.compute.claims [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1164.025078] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834486, 'name': ReconfigVM_Task, 'duration_secs': 0.309705} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.025450] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.026073] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5d3d164-9d8e-49b6-8a7c-27418accdf8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.033772] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1164.033772] env[68638]: value = "task-2834487" [ 1164.033772] env[68638]: _type = "Task" [ 1164.033772] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.041950] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834487, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.349554] env[68638]: DEBUG nova.network.neutron [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.424871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c22825c3-b3db-434a-b5e6-907dc5bcd01c tempest-ServersNegativeTestJSON-75032751 
tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.453s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.425807] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.234s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.425853] env[68638]: INFO nova.compute.manager [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Unshelving [ 1164.543513] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834487, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.613646] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.613911] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.614148] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.614334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.614502] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.616641] env[68638]: INFO nova.compute.manager [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Terminating instance [ 1164.852244] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.858861] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af2d5bd-d297-459b-9fc0-39cbac576bdb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.880481] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1165.045649] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834487, 'name': Rename_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.098243] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483708b7-f178-4f6a-a490-1331672de2ce {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.107027] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0736c58f-24d1-4877-a3bb-22399b0445f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.135713] env[68638]: DEBUG nova.compute.manager [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1165.136025] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.136892] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d71227-d1a1-4655-b6c1-038568895633 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.140299] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a898dc-1916-4f6d-bfa8-1c82f4c4d047 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.147939] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.149686] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0401d602-7f3b-41ba-bd54-a3a602b32360 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.152160] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc628eb-3251-432a-a71b-fefd35224f5d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.168933] env[68638]: DEBUG nova.compute.provider_tree [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.171595] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1165.171595] env[68638]: value = "task-2834488" [ 1165.171595] env[68638]: _type = "Task" [ 1165.171595] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.179820] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834488, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.356569] env[68638]: DEBUG nova.compute.manager [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1165.357548] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0f9751-a681-4965-bfc4-95f44969287a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.386320] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.386623] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b4d775d-1a4a-4aa6-8bcc-b10b68b177db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.396045] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1165.396045] env[68638]: value = "task-2834489" [ 1165.396045] env[68638]: _type = "Task" [ 1165.396045] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.405684] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.450313] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.547764] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834487, 'name': Rename_Task, 'duration_secs': 1.168618} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.548111] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1165.548364] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf303088-a253-4964-b915-5c4899066c5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.556103] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1165.556103] env[68638]: value = "task-2834490" [ 1165.556103] env[68638]: _type = "Task" [ 1165.556103] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.575718] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834490, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.673547] env[68638]: DEBUG nova.scheduler.client.report [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.685922] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834488, 'name': PowerOffVM_Task, 'duration_secs': 0.246757} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.686225] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.686415] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1165.686781] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ff80e83-2e00-44da-9e1d-ffddb823f66e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.750921] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1165.751244] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1165.751493] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleting the datastore file [datastore2] 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.751971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.752259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.752527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} 
[ 1165.752781] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.752962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.754690] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9415d6d2-34a6-45be-ad14-e93f4707b363 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.757141] env[68638]: INFO nova.compute.manager [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Terminating instance [ 1165.764631] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for the task: (returnval){ [ 1165.764631] env[68638]: value = "task-2834492" [ 1165.764631] env[68638]: _type = "Task" [ 1165.764631] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.773533] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.907360] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834489, 'name': PowerOffVM_Task, 'duration_secs': 0.194662} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.907676] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1165.907894] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.066429] env[68638]: DEBUG oslo_vmware.api [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834490, 'name': PowerOnVM_Task, 'duration_secs': 0.483292} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.066702] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1166.066907] env[68638]: INFO nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Took 8.20 seconds to spawn the instance on the hypervisor. [ 1166.067140] env[68638]: DEBUG nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.067874] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598a6f72-ded4-407e-b424-29d3394678ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.181185] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.181743] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1166.184453] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.734s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.184668] env[68638]: DEBUG nova.objects.instance [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'pci_requests' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.261103] env[68638]: DEBUG nova.compute.manager [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1166.261672] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1166.261957] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1efb28c5-c7eb-4ac2-a226-c53efd868700 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.269453] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1166.269453] env[68638]: value = "task-2834493" [ 1166.269453] env[68638]: _type = "Task" [ 1166.269453] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.276286] env[68638]: DEBUG oslo_vmware.api [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Task: {'id': task-2834492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138124} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.276948] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.277303] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.277540] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.277759] env[68638]: INFO nova.compute.manager [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1166.277967] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.278197] env[68638]: DEBUG nova.compute.manager [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1166.278316] env[68638]: DEBUG nova.network.neutron [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1166.283240] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834493, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.375854] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17e8766-8426-45ff-be11-9b5ee0545d86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.383517] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Doing hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1166.383814] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-aef5dc66-c27d-48db-9301-bd0945df4f2f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.391063] env[68638]: DEBUG oslo_vmware.api [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1166.391063] env[68638]: value = "task-2834494" [ 1166.391063] env[68638]: _type = "Task" [ 1166.391063] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.406662] env[68638]: DEBUG oslo_vmware.api [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834494, 'name': ResetVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.414305] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1166.414713] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.414924] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.415343] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor 
pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.415565] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.415756] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1166.415979] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1166.416165] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1166.416338] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1166.416510] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1166.416684] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1166.422878] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c2b80c7-9c98-40ef-85ef-0d03449251ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.451982] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1166.451982] env[68638]: value = "task-2834495" [ 1166.451982] env[68638]: _type = "Task" [ 1166.451982] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.461013] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834495, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.586076] env[68638]: INFO nova.compute.manager [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Took 13.11 seconds to build instance. [ 1166.683368] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_power_states {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.687481] env[68638]: DEBUG nova.compute.utils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1166.690974] env[68638]: DEBUG nova.objects.instance [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'numa_topology' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.691466] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1166.691638] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1166.779281] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834493, 'name': PowerOffVM_Task, 'duration_secs': 0.233862} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.779560] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.779757] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1166.779947] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570016', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'name': 'volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e0903192-4fa7-437a-9023-33e8e65124e3', 'attached_at': '2025-03-07T02:37:20.000000', 'detached_at': '', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'serial': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1166.780727] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdf8b1a-d3c9-4967-aff9-174a6572879f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.800358] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f37108-d408-4521-ad8b-003120c7d534 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.805323] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df68029a-5364-4322-84ec-001e13d8956b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.825027] env[68638]: DEBUG nova.policy [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5fce0bf2fb44b84afd238d875790fbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ccc24eaf6cf74d539558c0a736e18c3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1166.828116] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a1fc9c-1654-46c5-9ca6-ca3340b1411d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.844045] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] The volume has not been displaced from its original location: [datastore1] volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38/volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1166.849254] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1166.849540] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31f2df83-db11-4f16-8cf4-545fc42da98e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.867361] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1166.867361] env[68638]: value = "task-2834496" [ 1166.867361] env[68638]: _type = "Task" [ 1166.867361] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.878381] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834496, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.899071] env[68638]: DEBUG oslo_vmware.api [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834494, 'name': ResetVM_Task, 'duration_secs': 0.108097} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.899338] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Did hard reboot of VM {{(pid=68638) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1166.899572] env[68638]: DEBUG nova.compute.manager [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1166.900274] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e19a94-25ae-469f-ae06-71a80db79434 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.961957] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834495, 'name': ReconfigVM_Task, 'duration_secs': 0.212518} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.963106] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1167.088827] env[68638]: DEBUG oslo_concurrency.lockutils [None req-075f8c70-fdeb-4370-9e52-46463ae34996 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.620s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.187723] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Getting list of instances from cluster (obj){ [ 1167.187723] env[68638]: value = "domain-c8" [ 1167.187723] env[68638]: _type = "ClusterComputeResource" [ 1167.187723] env[68638]: } {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1167.190309] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c23ef65-66fa-4a9f-aa3f-b684d28b4320 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.194962] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1167.199553] env[68638]: INFO nova.compute.claims [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1167.204722] env[68638]: DEBUG nova.compute.manager [req-a013daed-6e11-4e4d-81a5-5856068971ad req-09ebbcb2-a522-4b3c-ae24-bab80cc67fbc service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Received event network-vif-deleted-fe93833c-d268-4ad4-8246-17c09472e5db {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1167.205036] env[68638]: INFO nova.compute.manager [req-a013daed-6e11-4e4d-81a5-5856068971ad req-09ebbcb2-a522-4b3c-ae24-bab80cc67fbc service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Neutron deleted interface fe93833c-d268-4ad4-8246-17c09472e5db; detaching it from the instance and deleting it from the info cache [ 1167.205652] env[68638]: DEBUG nova.network.neutron [req-a013daed-6e11-4e4d-81a5-5856068971ad req-09ebbcb2-a522-4b3c-ae24-bab80cc67fbc service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.225168] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Got total of 11 instances {{(pid=68638) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1167.226394] env[68638]: WARNING nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] While synchronizing instance power states, found 13 instances in the database and 11 instances on the hypervisor. 
[ 1167.226622] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 423af2cc-4dea-445f-a01c-6d4d57c3f0de {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.226885] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 53e92f51-9010-4fb2-89e1-9d16a252ef6e {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.227143] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid df2e066d-7c71-4aec-ab9b-a339a7ff21fb {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.227468] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 7d99d946-f2df-4d31-911f-ac479849b901 {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.227895] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid cc2e9758-45ee-4e94-ad74-ba7d6c85f06d {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.228242] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.228410] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.228603] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.228867] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid e0903192-4fa7-437a-9023-33e8e65124e3 {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.229012] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.229228] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 00a22fef-5d10-4413-a9aa-070a1a863cdd {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.229790] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid 2e788c4c-f6d1-4001-9389-1068887d205f {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.229790] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Triggering sync for uuid e2e74700-aa83-484a-a61f-9f98a6019fdb {{(pid=68638) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1167.230086] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.230360] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.230712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.230954] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.231388] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.231493] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.231761] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "7d99d946-f2df-4d31-911f-ac479849b901" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.232086] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "7d99d946-f2df-4d31-911f-ac479849b901" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.233142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.233142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.233142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.233142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.233281] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.234240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.234832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "e0903192-4fa7-437a-9023-33e8e65124e3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.235252] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.235544] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.235850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.236110] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.236353] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 1167.236590] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.236842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.237089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.237392] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] During sync_power_state the instance has a pending task (resize_prep). Skip. 
[ 1167.237633] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.237881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.239675] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6260cfaa-475c-402c-a257-f399843894f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.243383] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bf4252-c2b6-445d-a650-be0d7c0f45bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.248256] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fa5e4d-deb0-4ae4-af41-d82334e65bfb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.251283] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a86788-23d8-4baa-a5f6-4434b8cff6a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.253982] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0daa4c-1af3-494b-b5a5-cce8b2169464 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.256608] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bae7cf-c2e7-43fb-9a66-414eab0d94a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.260129] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e92ad4-50b6-4e2e-b928-436afbe5c5a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.271086] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Successfully created port: da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1167.284043] env[68638]: WARNING oslo_messaging._drivers.amqpdriver [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1167.377964] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834496, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.410919] env[68638]: DEBUG oslo_concurrency.lockutils [None req-8a52334a-f49a-429d-9039-102aa3d0e95a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.356s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.411857] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.179s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.412108] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] During sync_power_state the instance has a pending task (reboot_started_hard). Skip. [ 1167.412197] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.468278] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.468571] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.468707] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.468888] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 
tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.469051] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.469203] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.469412] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.469573] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.469742] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.469907] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.470097] env[68638]: DEBUG nova.virt.hardware [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.475561] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1167.475853] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce9cfbe9-0820-4d2e-b25e-dcc35ce3d0d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.495822] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting 
for the task: (returnval){ [ 1167.495822] env[68638]: value = "task-2834497" [ 1167.495822] env[68638]: _type = "Task" [ 1167.495822] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.508409] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834497, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.593881] env[68638]: DEBUG nova.compute.manager [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1167.695550] env[68638]: DEBUG nova.network.neutron [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.711165] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0e1425c-c042-4532-80f4-28790a4696c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.721835] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdbab74-3704-4981-925a-abde0b848e68 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.758426] env[68638]: DEBUG nova.compute.manager [req-a013daed-6e11-4e4d-81a5-5856068971ad req-09ebbcb2-a522-4b3c-ae24-bab80cc67fbc service nova] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Detach interface failed, port_id=fe93833c-d268-4ad4-8246-17c09472e5db, reason: Instance 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1167.779918] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.786871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.555s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.787262] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "7d99d946-f2df-4d31-911f-ac479849b901" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.555s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.787563] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.789992] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.792414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.559s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.792727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.557s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.877894] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834496, 'name': ReconfigVM_Task, 'duration_secs': 0.903266} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.878205] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1167.883183] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81f37f6-91fa-4830-bd86-74b6b04f2f61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.898409] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1167.898409] env[68638]: value = "task-2834498" [ 1167.898409] env[68638]: _type = "Task" [ 1167.898409] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.907953] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834498, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.006259] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834497, 'name': ReconfigVM_Task, 'duration_secs': 0.275456} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.006604] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1168.007512] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdb361d-044a-43f9-a5d2-8d82a98884d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.029820] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.030168] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70750e9c-0d83-4005-8a8b-c0a0d79dea93 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.048468] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1168.048468] env[68638]: value = "task-2834499" [ 1168.048468] env[68638]: _type = "Task" [ 1168.048468] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.056358] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834499, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.115351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.199268] env[68638]: INFO nova.compute.manager [-] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Took 1.92 seconds to deallocate network for instance. [ 1168.211306] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1168.237635] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1168.238062] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.238335] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1168.238642] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.238863] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1168.239100] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1168.239423] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1168.239687] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1168.239960] env[68638]: DEBUG 
nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1168.240238] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1168.240530] env[68638]: DEBUG nova.virt.hardware [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1168.242517] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30043f8f-951b-4fbb-94da-9cba05fb7ffb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.255398] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43dc260-a0ee-45b2-a775-6d826c22bff6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.411780] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834498, 'name': ReconfigVM_Task, 'duration_secs': 0.166113} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.412148] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570016', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'name': 'volume-aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e0903192-4fa7-437a-9023-33e8e65124e3', 'attached_at': '2025-03-07T02:37:20.000000', 'detached_at': '', 'volume_id': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38', 'serial': 'aba426ca-0b6c-4510-8544-7a9bd4b9af38'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1168.412458] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.413170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c7ef37-fac2-4ab2-a502-2bfa6967db10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.421701] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.421936] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d187d3aa-aa65-4ca6-b989-6bdb5ea7336a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.427097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2ad2b3-d585-4bac-90ce-57b574282e10 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.433633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310168a9-f6b7-4ea9-a3f5-171b4f530064 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.463734] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1598f8-353d-4786-84ca-26deee21cc37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.471338] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48411ff9-a632-425c-94a0-863102cf1b2f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.484429] env[68638]: DEBUG nova.compute.provider_tree [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] 
Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.559760] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834499, 'name': ReconfigVM_Task, 'duration_secs': 0.265236} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.559858] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd/00a22fef-5d10-4413-a9aa-070a1a863cdd.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1168.560161] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.705958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.752249] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Successfully updated port: da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1168.987682] env[68638]: DEBUG nova.scheduler.client.report [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.067187] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8128f2c-0cfd-4e3d-bb2e-1c4af83a442a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.087712] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a99f1b8-80b1-4f6f-a975-1edc9ad6ac02 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.104877] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1169.232529] env[68638]: DEBUG nova.compute.manager [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Received event network-vif-plugged-da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1169.232757] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.232966] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.233153] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.233415] env[68638]: DEBUG nova.compute.manager [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] No waiting events found dispatching network-vif-plugged-da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1169.233730] env[68638]: WARNING nova.compute.manager [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Received unexpected event network-vif-plugged-da4e63a0-6fb2-436b-b720-8c1e1b21decf for instance with vm_state building and task_state spawning. [ 1169.233779] env[68638]: DEBUG nova.compute.manager [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Received event network-changed-da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1169.233913] env[68638]: DEBUG nova.compute.manager [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Refreshing instance network info cache due to event network-changed-da4e63a0-6fb2-436b-b720-8c1e1b21decf. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1169.234095] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Acquiring lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.234261] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Acquired lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.234384] env[68638]: DEBUG nova.network.neutron [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Refreshing network info cache for port da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1169.264068] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.311290] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.311443] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.311624] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore1] e0903192-4fa7-437a-9023-33e8e65124e3 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.311903] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9fdd959-2294-44ca-8b69-842fa5e6cc2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.319293] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1169.319293] env[68638]: value = "task-2834501" [ 1169.319293] env[68638]: _type = "Task" [ 1169.319293] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.327197] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.492391] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.308s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.494559] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.379s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.548764] env[68638]: INFO nova.network.neutron [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating port 2725817f-dd0e-4f09-ba4d-70f48e578f8c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1169.662453] env[68638]: DEBUG nova.network.neutron [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Port b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1169.775331] env[68638]: DEBUG nova.network.neutron [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1169.831543] env[68638]: DEBUG oslo_vmware.api [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209262} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.831798] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.831985] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.834682] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.834682] env[68638]: INFO nova.compute.manager [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Took 3.57 seconds to destroy the instance on the hypervisor. [ 1169.834904] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.835189] env[68638]: DEBUG nova.compute.manager [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.835303] env[68638]: DEBUG nova.network.neutron [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.865166] env[68638]: DEBUG nova.network.neutron [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.000088] env[68638]: INFO nova.compute.claims [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.338440] env[68638]: DEBUG nova.compute.manager [req-c2def9b6-242b-45fe-9781-7db3e9fe8681 req-e8294e70-107b-4bb2-8c48-9c13953ef7ee service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Received event network-vif-deleted-ef048785-d375-47e3-9f3c-2f26fd1bb175 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1170.339033] env[68638]: INFO nova.compute.manager [req-c2def9b6-242b-45fe-9781-7db3e9fe8681 req-e8294e70-107b-4bb2-8c48-9c13953ef7ee service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Neutron deleted interface ef048785-d375-47e3-9f3c-2f26fd1bb175; detaching it from the instance and deleting it from the info cache [ 1170.339033] env[68638]: DEBUG nova.network.neutron [req-c2def9b6-242b-45fe-9781-7db3e9fe8681 req-e8294e70-107b-4bb2-8c48-9c13953ef7ee service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.367903] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd7ea6f-e3ac-4f5f-ad29-904af2959e4e req-308a81c1-27ad-40e1-947a-d87cda0aac3d service nova] Releasing lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.368667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.368830] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1170.507484] env[68638]: INFO nova.compute.resource_tracker [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] 
[instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating resource usage from migration 1ae08ae0-f8a3-4c61-96fa-4b16d7ca8f4d [ 1170.685014] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.685251] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.685429] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.712566] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40468d7a-d0ce-4020-9cd7-06f5953a11ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.720294] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4ebe3c-ee48-45cd-81be-3f68eb4becdc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.750583] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d324d8c8-972c-4a22-a13d-e766343d97b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.757585] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7693f764-d98f-498b-b1c1-e6cbebf3be70 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.772017] env[68638]: DEBUG nova.compute.provider_tree [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1170.822652] env[68638]: DEBUG nova.network.neutron [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.840789] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56dc5e8b-e2a0-4c7c-be64-d14ddca86bb7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.851096] env[68638]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082e8314-5626-4f6a-a481-e5eb59aa9820 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.885130] env[68638]: DEBUG nova.compute.manager [req-c2def9b6-242b-45fe-9781-7db3e9fe8681 req-e8294e70-107b-4bb2-8c48-9c13953ef7ee service nova] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Detach interface failed, port_id=ef048785-d375-47e3-9f3c-2f26fd1bb175, reason: Instance e0903192-4fa7-437a-9023-33e8e65124e3 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1170.917265] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1171.052739] env[68638]: DEBUG nova.network.neutron [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating instance_info_cache with network_info: [{"id": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "address": "fa:16:3e:0d:6b:3d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e63a0-6f", "ovs_interfaceid": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.059957] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.059957] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.060177] env[68638]: DEBUG nova.network.neutron [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 
tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.124631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.124631] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.260776] env[68638]: DEBUG nova.compute.manager [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1171.261020] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.261229] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.261395] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.261561] env[68638]: DEBUG nova.compute.manager [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] No waiting events found dispatching network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1171.261721] env[68638]: WARNING nova.compute.manager [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received unexpected event network-vif-plugged-2725817f-dd0e-4f09-ba4d-70f48e578f8c for instance with vm_state shelved_offloaded and task_state spawning. 
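[editor's note] The "Acquiring lock … by …", "Lock … acquired … :: waited N s" and "Lock … released … :: held N s" triplets that bracket the event handling and cache refreshes above are emitted by oslo.concurrency's lockutils wrappers (the `inner`/`lock` source references in each record). Below is a minimal illustrative sketch of that pattern, not Nova's actual code; the lock names and the helper function are placeholders chosen to mirror the records above.

```python
# Illustrative sketch of the oslo.concurrency locking pattern that produces the
# "Acquiring lock ..." / "acquired ... waited" / "released ... held" DEBUG lines.
# The lock names and function below are placeholders, not Nova code.
from oslo_concurrency import lockutils


@lockutils.synchronized('e2e74700-aa83-484a-a61f-9f98a6019fdb-events')
def pop_event():
    # Runs with the per-instance event lock held; the wrapper logs the acquire
    # (including wait time) before this body and the release (held time) after.
    return None


# lockutils also offers an explicit context-manager form; the log records that
# reference lockutils.py's lock() function follow this shape.
with lockutils.lock('refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb'):
    pass
```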
[ 1171.261877] env[68638]: DEBUG nova.compute.manager [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1171.262037] env[68638]: DEBUG nova.compute.manager [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing instance network info cache due to event network-changed-2725817f-dd0e-4f09-ba4d-70f48e578f8c. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1171.262207] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.275111] env[68638]: DEBUG nova.scheduler.client.report [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1171.324987] env[68638]: INFO nova.compute.manager [-] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Took 1.49 seconds to deallocate network for instance. 
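[editor's note] Most vCenter interactions in this log follow the same oslo.vmware call-and-poll shape: a SOAP method is invoked ("Invoking <Object>.<Method> with opID=oslo.vmware-…"), a *_Task reference is returned ("Waiting for the task: (returnval){ value = \"task-…\" }"), and the session polls it until completion ("progress is N%", then "completed successfully"). A rough sketch of that flow using the public oslo.vmware API follows; the method name, references and connection details are placeholders and are not taken from this run.

```python
# Rough sketch of the oslo.vmware invoke/wait pattern seen throughout this log.
# All identifiers and credentials below are placeholders.
from oslo_vmware import api as vmware_api


def make_session(host, user, password):
    # Connects to vCenter immediately (create_session defaults to True); compare
    # the "_create_session" lock acquisition earlier in this log.
    return vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)


def power_off_vm(session, vm_ref):
    """Invoke a vSphere *_Task method and block until the task finishes."""
    # invoke_api() sends the SOAP request (the "Invoking VirtualMachine.
    # PowerOffVM_Task" style records) and returns a Task reference.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task, producing the periodic "progress is N%"
    # records, and raises if the task ends in an error state.
    return session.wait_for_task(task)
```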
[ 1171.555730] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.556173] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance network_info: |[{"id": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "address": "fa:16:3e:0d:6b:3d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e63a0-6f", "ovs_interfaceid": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1171.556455] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:6b:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da4e63a0-6fb2-436b-b720-8c1e1b21decf', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1171.564351] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1171.566414] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1171.566682] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a25f443f-d66e-4b2f-9e2a-cf76aa1055e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.586378] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1171.586378] env[68638]: value = "task-2834502" [ 1171.586378] env[68638]: _type = "Task" [ 1171.586378] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.593623] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834502, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.627605] env[68638]: INFO nova.compute.manager [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Detaching volume 66d78f72-80b8-487d-8315-0d99a3f6172d [ 1171.663013] env[68638]: INFO nova.virt.block_device [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Attempting to driver detach volume 66d78f72-80b8-487d-8315-0d99a3f6172d from mountpoint /dev/sdb [ 1171.663273] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1171.663465] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570036', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'name': 'volume-66d78f72-80b8-487d-8315-0d99a3f6172d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ace44b04-6dcf-4845-af4e-b28ddeebe60e', 'attached_at': '', 'detached_at': '', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'serial': '66d78f72-80b8-487d-8315-0d99a3f6172d'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1171.664355] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d99ba0-ad76-4cb2-bb8b-5387264d06e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.689174] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76571130-74a8-4224-b017-12f437058f9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.698730] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65f348f-0955-4819-995a-919d57a99e37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.724261] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe702634-7080-4df8-b805-7bae740322a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.727265] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.727439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.727616] env[68638]: DEBUG nova.network.neutron [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.741138] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-66d78f72-80b8-487d-8315-0d99a3f6172d/volume-66d78f72-80b8-487d-8315-0d99a3f6172d.vmdk. No consolidation needed. {{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1171.746229] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1171.747111] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc7b831d-ddc3-4289-a098-6f0223519b15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.766881] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1171.766881] env[68638]: value = "task-2834503" [ 1171.766881] env[68638]: _type = "Task" [ 1171.766881] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.778732] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834503, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.779962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.285s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.780221] env[68638]: INFO nova.compute.manager [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Migrating [ 1171.788531] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.083s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.788868] env[68638]: DEBUG nova.objects.instance [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lazy-loading 'resources' on Instance uuid 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.800962] env[68638]: DEBUG nova.network.neutron [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": 
"2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.866929] env[68638]: INFO nova.compute.manager [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Took 0.54 seconds to detach 1 volumes for instance. [ 1171.869356] env[68638]: DEBUG nova.compute.manager [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Deleting volume: aba426ca-0b6c-4510-8544-7a9bd4b9af38 {{(pid=68638) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1172.098388] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834502, 'name': CreateVM_Task, 'duration_secs': 0.302093} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.098859] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1172.099670] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.100024] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.101467] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1172.101467] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fab1b339-9752-4bb9-865d-2633bc8eea9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.104923] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1172.104923] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52522bae-78eb-f994-c966-4adfe6f395db" [ 1172.104923] env[68638]: _type = "Task" [ 1172.104923] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.112129] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52522bae-78eb-f994-c966-4adfe6f395db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.278260] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834503, 'name': ReconfigVM_Task, 'duration_secs': 0.234693} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.278555] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1172.283473] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70dd990-92eb-42d3-8ad3-d70f408502e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.298764] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1172.298764] env[68638]: value = "task-2834505" [ 1172.298764] env[68638]: _type = "Task" [ 1172.298764] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.303708] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.303893] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.304207] env[68638]: DEBUG nova.network.neutron [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.306542] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.308897] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.309169] env[68638]: DEBUG nova.network.neutron [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Refreshing network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.313476] env[68638]: DEBUG oslo_vmware.api [None 
req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834505, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.338293] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c9366b6ff918192e8ace3094f8434718',container_format='bare',created_at=2025-03-07T02:37:32Z,direct_url=,disk_format='vmdk',id=a04f22d5-e7b9-474d-8313-7a4349baeb1c,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-626903088-shelved',owner='8938cbcafe93492e8f53613d992790bf',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2025-03-07T02:37:46Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1172.338586] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1172.338773] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1172.338986] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1172.339189] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1172.339369] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1172.339675] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1172.339869] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 
tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1172.340063] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1172.340239] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1172.340414] env[68638]: DEBUG nova.virt.hardware [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1172.341542] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543b3cbf-d7ab-41fa-9144-84ffd0593e5d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.353807] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce69d980-b610-4ffc-b0aa-3b936e97f79a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.371177] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:8a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2725817f-dd0e-4f09-ba4d-70f48e578f8c', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1172.378829] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1172.381721] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1172.384157] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98df814e-f0b1-46e0-8fe2-1c7e765f75b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.406617] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1172.406617] env[68638]: value = "task-2834506" [ 1172.406617] env[68638]: _type = "Task" [ 1172.406617] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.411190] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.417317] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834506, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.520812] env[68638]: DEBUG nova.network.neutron [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.549954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40c454a-ecb1-45ef-8198-1069a05ed382 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.557682] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ff70ba-89d1-47ac-88e2-427f18634d76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.589077] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdd4189-7ecf-45da-bca6-e67abd7211c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.596068] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a24010-4953-4fe2-8f2a-03ffb4dac597 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.609017] env[68638]: DEBUG nova.compute.provider_tree [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 
tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.617629] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52522bae-78eb-f994-c966-4adfe6f395db, 'name': SearchDatastore_Task, 'duration_secs': 0.00996} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.618437] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.618667] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1172.618897] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.619061] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.619244] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1172.619699] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e859d39-bfc0-45c7-9d8e-6e7be9bbc8c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.627507] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1172.627671] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 
tempest-AttachVolumeNegativeTest-754123378-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1172.628365] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4398ef20-69ce-414a-9d72-11fd3e645363 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.633072] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1172.633072] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca2803-1a9b-7b2c-a6ac-03a0557a74e7" [ 1172.633072] env[68638]: _type = "Task" [ 1172.633072] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.640513] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ca2803-1a9b-7b2c-a6ac-03a0557a74e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.808664] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834505, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.916323] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834506, 'name': CreateVM_Task} progress is 25%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.020906] env[68638]: DEBUG nova.network.neutron [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updated VIF entry in instance network info cache for port 2725817f-dd0e-4f09-ba4d-70f48e578f8c. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.020906] env[68638]: DEBUG nova.network.neutron [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.024408] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.036931] env[68638]: DEBUG nova.network.neutron [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1173.114571] env[68638]: DEBUG nova.scheduler.client.report [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.143150] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ca2803-1a9b-7b2c-a6ac-03a0557a74e7, 'name': SearchDatastore_Task, 'duration_secs': 0.008991} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.143913] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc25a0c-623c-45b0-bcce-2be51f9cee82 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.149104] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1173.149104] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527f4727-0956-32e5-78d8-0af07ebd83ce" [ 1173.149104] env[68638]: _type = "Task" [ 1173.149104] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.157428] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527f4727-0956-32e5-78d8-0af07ebd83ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.309670] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834505, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.417429] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834506, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.524350] env[68638]: DEBUG oslo_concurrency.lockutils [req-2f22707b-72bb-48bb-92d8-b7d029d45db8 req-23a8080d-40f8-4774-8419-326a73b66fb3 service nova] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.539927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.552110] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9e1e50-e136-4057-9cb2-737c64856b49 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.574422] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ec5c71-3927-4879-bd18-51233488a4c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.581410] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.620132] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.622980] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.212s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.623432] env[68638]: DEBUG nova.objects.instance [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'resources' on Instance uuid e0903192-4fa7-437a-9023-33e8e65124e3 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1173.641948] env[68638]: INFO nova.scheduler.client.report [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Deleted allocations for instance 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1 [ 1173.660793] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527f4727-0956-32e5-78d8-0af07ebd83ce, 
'name': SearchDatastore_Task, 'duration_secs': 0.010284} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.660793] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.660793] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e2e74700-aa83-484a-a61f-9f98a6019fdb/e2e74700-aa83-484a-a61f-9f98a6019fdb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1173.661097] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48268ec8-c7e2-4547-92fb-9dc7cf3d13f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.667435] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1173.667435] env[68638]: value = "task-2834507" [ 1173.667435] env[68638]: _type = "Task" [ 1173.667435] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.675509] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.810730] env[68638]: DEBUG oslo_vmware.api [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834505, 'name': ReconfigVM_Task, 'duration_secs': 1.168891} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.811058] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570036', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'name': 'volume-66d78f72-80b8-487d-8315-0d99a3f6172d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ace44b04-6dcf-4845-af4e-b28ddeebe60e', 'attached_at': '', 'detached_at': '', 'volume_id': '66d78f72-80b8-487d-8315-0d99a3f6172d', 'serial': '66d78f72-80b8-487d-8315-0d99a3f6172d'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1173.919867] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834506, 'name': CreateVM_Task, 'duration_secs': 1.317741} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.920096] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1173.920912] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.921172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.921642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1173.921964] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a0919a-934a-4440-9231-15aac97c9db0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.929864] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1173.929864] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529842d4-190e-7606-2949-cff67e418448" [ 1173.929864] env[68638]: _type = "Task" [ 1173.929864] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.938626] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529842d4-190e-7606-2949-cff67e418448, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.087960] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.089672] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5319e35-89a3-4104-a044-8f6aa4bd04b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.097616] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1174.097616] env[68638]: value = "task-2834508" [ 1174.097616] env[68638]: _type = "Task" [ 1174.097616] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.109467] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.152848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-87630065-1290-4eae-9a8f-4dd3940aaba4 tempest-AttachVolumeTestJSON-1230075112 tempest-AttachVolumeTestJSON-1230075112-project-member] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.538s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.153126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.919s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.153321] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1174.153493] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.180325] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834507, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479688} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.180647] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] e2e74700-aa83-484a-a61f-9f98a6019fdb/e2e74700-aa83-484a-a61f-9f98a6019fdb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1174.180865] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1174.181197] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be9a5968-5d6e-494a-ad01-151e78d61689 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.190478] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1174.190478] env[68638]: value = "task-2834509" [ 1174.190478] env[68638]: _type = "Task" [ 1174.190478] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.201314] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.338533] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2de080-5b94-41a1-9ca5-2f2cc4e1e955 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.346542] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b53d1d-6b13-4c7f-b530-ca6f2a57e6cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.378711] env[68638]: DEBUG nova.objects.instance [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'flavor' on Instance uuid ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.380649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4667b837-bbf0-43f8-a7e1-5a9b72af90b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.388381] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c355263-b6a1-4c63-850a-3f943bdb988c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.404362] env[68638]: DEBUG nova.compute.provider_tree [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.439534] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.439803] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Processing image a04f22d5-e7b9-474d-8313-7a4349baeb1c {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1174.440046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.440198] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.440377] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.440625] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bddc31eb-c734-4b3c-a273-96da6a85869a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.454122] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.454337] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1174.455232] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bd5dfdb-fcb2-4242-a8b0-41bff4647859 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.460971] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1174.460971] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]522dc188-8fc7-44a9-c248-eb213987132e" [ 1174.460971] env[68638]: _type = "Task" [ 1174.460971] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.469239] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]522dc188-8fc7-44a9-c248-eb213987132e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.607399] env[68638]: DEBUG oslo_vmware.api [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834508, 'name': PowerOnVM_Task, 'duration_secs': 0.447449} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.608574] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.608574] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c6785b6d-cf3e-406e-8036-acff3f7a39a2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance '00a22fef-5d10-4413-a9aa-070a1a863cdd' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1174.701911] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072612} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.702204] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1174.702974] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187b633f-0779-49f3-8c3e-754e5b79e2d7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.725673] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] e2e74700-aa83-484a-a61f-9f98a6019fdb/e2e74700-aa83-484a-a61f-9f98a6019fdb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1174.726385] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0ab88e2-0b93-40b5-a7cc-709260dc5373 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.746214] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1174.746214] env[68638]: value = "task-2834511" [ 1174.746214] env[68638]: _type = "Task" [ 1174.746214] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.754283] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834511, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.908367] env[68638]: DEBUG nova.scheduler.client.report [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1174.971221] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1174.971470] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Fetch image to [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69/OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1174.971653] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Downloading stream optimized image a04f22d5-e7b9-474d-8313-7a4349baeb1c to [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69/OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69.vmdk on the data store datastore2 as vApp {{(pid=68638) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1174.971823] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Downloading image file data a04f22d5-e7b9-474d-8313-7a4349baeb1c to the ESX as VM named 'OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69' {{(pid=68638) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1175.047083] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1175.047083] env[68638]: value = "resgroup-9" [ 1175.047083] env[68638]: _type = "ResourcePool" [ 1175.047083] env[68638]: }. 
{{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1175.047487] env[68638]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3954b86b-fc30-439c-8524-796ff693c492 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.065680] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12a5b53-891d-464d-9648-e1a3e23efa0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.069262] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lease: (returnval){ [ 1175.069262] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1175.069262] env[68638]: _type = "HttpNfcLease" [ 1175.069262] env[68638]: } obtained for vApp import into resource pool (val){ [ 1175.069262] env[68638]: value = "resgroup-9" [ 1175.069262] env[68638]: _type = "ResourcePool" [ 1175.069262] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1175.069600] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the lease: (returnval){ [ 1175.069600] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1175.069600] env[68638]: _type = "HttpNfcLease" [ 1175.069600] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1175.085107] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.092494] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1175.092494] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1175.092494] env[68638]: _type = "HttpNfcLease" [ 1175.092494] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1175.257895] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834511, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.388676] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1ae13e57-641e-4e12-b825-5e2f50a7adfc tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.264s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.412482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.438083] env[68638]: INFO nova.scheduler.client.report [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocations for instance e0903192-4fa7-437a-9023-33e8e65124e3 [ 1175.581318] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1175.581318] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1175.581318] env[68638]: _type = "HttpNfcLease" [ 1175.581318] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1175.591133] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1175.591445] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6da93b5e-3391-4025-8b2e-db5eccf06150 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.599123] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1175.599123] env[68638]: value = "task-2834513" [ 1175.599123] env[68638]: _type = "Task" [ 1175.599123] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.614485] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.759028] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834511, 'name': ReconfigVM_Task, 'duration_secs': 0.69164} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.759028] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfigured VM instance instance-00000075 to attach disk [datastore2] e2e74700-aa83-484a-a61f-9f98a6019fdb/e2e74700-aa83-484a-a61f-9f98a6019fdb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1175.759294] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de878c16-40be-459b-83fa-021dbf7c3039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.765961] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1175.765961] env[68638]: value = "task-2834514" [ 1175.765961] env[68638]: _type = "Task" [ 1175.765961] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.775018] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834514, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.896454] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.946046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-18af7bd6-5781-4eb7-8f82-4c6300e700c0 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.193s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.947493] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.713s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.947493] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1175.947728] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "e0903192-4fa7-437a-9023-33e8e65124e3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.078105] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1176.078105] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1176.078105] env[68638]: _type = "HttpNfcLease" [ 1176.078105] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1176.078105] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1176.078105] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca9741-6ae1-49df-0e38-9b12108235fd" [ 1176.078105] env[68638]: _type = "HttpNfcLease" [ 1176.078105] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1176.078714] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194b062d-dc47-43c7-9b39-5f6e8dc74fae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.085957] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1176.086125] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1176.158590] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0f32d3dd-b462-48d0-b03c-600715ffa492 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.165935] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834513, 'name': PowerOffVM_Task, 'duration_secs': 0.194802} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.167251] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1176.167467] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.276128] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834514, 'name': Rename_Task, 'duration_secs': 0.27768} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.276681] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1176.276931] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac915b0a-5d95-4fa9-bb5a-c260e31c3e77 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.283007] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1176.283007] env[68638]: value = "task-2834515" [ 1176.283007] env[68638]: _type = "Task" [ 1176.283007] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.290007] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834515, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.493729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.493997] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.494280] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.494574] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.494715] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.497190] env[68638]: INFO nova.compute.manager [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Terminating instance [ 1176.674029] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1176.674504] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1176.674668] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1176.674979] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1176.675291] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1176.675541] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1176.675878] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1176.676109] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1176.676302] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1176.676476] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1176.676650] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1176.682271] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2aaffa7b-a28f-47a6-8cc8-bff9e8befe87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.698758] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1176.698758] env[68638]: value = "task-2834516" [ 1176.698758] env[68638]: _type = "Task" [ 1176.698758] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.715748] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834516, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.795528] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834515, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.817590] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.817928] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.818408] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.818759] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.819102] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.824248] env[68638]: INFO nova.compute.manager [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Terminating instance [ 1177.003610] env[68638]: DEBUG nova.compute.manager [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1177.003944] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1177.005048] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946a2c7b-c17b-4e94-80b0-647990dddd72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.015407] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.017054] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0258d235-3eab-4ea9-9fa4-a155a330dbea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.023358] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1177.023358] env[68638]: value = "task-2834517" [ 1177.023358] env[68638]: _type = "Task" [ 1177.023358] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.032598] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834517, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.210931] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834516, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.296880] env[68638]: DEBUG oslo_vmware.api [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834515, 'name': PowerOnVM_Task, 'duration_secs': 0.849543} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.297101] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1177.297261] env[68638]: INFO nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Took 9.09 seconds to spawn the instance on the hypervisor. [ 1177.297571] env[68638]: DEBUG nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1177.298543] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecd1260-63e9-410d-a00e-fb2164177e79 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.328110] env[68638]: DEBUG nova.compute.manager [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1177.328376] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1177.330648] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4341c05e-7a28-4f2f-b7f2-ac53e5cc3a66 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.337976] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.338279] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75bfd317-b49b-490a-b0ca-a6af0671890b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.345676] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1177.345676] env[68638]: value = "task-2834518" [ 1177.345676] env[68638]: _type = "Task" [ 1177.345676] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.353610] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834518, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.420357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.420675] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.420924] env[68638]: DEBUG nova.compute.manager [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Going to confirm migration 5 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1177.429965] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1177.430181] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1177.431154] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5a1ea0-37e4-401f-952c-6176c4caac80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.438229] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1177.438386] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1177.438651] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-57065634-efe6-48fd-9f6c-65cafd4223c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.535280] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834517, 'name': PowerOffVM_Task, 'duration_secs': 0.307118} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.535594] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1177.535830] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1177.536115] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2b4ec7f-4d1d-4fb6-a5ee-b80095d36da4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.610392] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1177.610650] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1177.610890] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleting the datastore file [datastore2] ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.611161] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f256cd5-7061-4124-a9d3-5788fad04bcf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.619562] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1177.619562] env[68638]: value = "task-2834520" [ 1177.619562] env[68638]: _type = 
"Task" [ 1177.619562] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.627559] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.648100] env[68638]: DEBUG oslo_vmware.rw_handles [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e75c7d-8beb-e0d0-6c24-ac9860198cda/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1177.648332] env[68638]: INFO nova.virt.vmwareapi.images [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Downloaded image file data a04f22d5-e7b9-474d-8313-7a4349baeb1c [ 1177.649243] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6945d8-1fb3-4aaf-888d-48daa6829706 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.667441] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ea1c56a-3a5a-4aad-88b1-07cc308656a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.701280] env[68638]: INFO nova.virt.vmwareapi.images [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] The imported VM was unregistered [ 1177.703868] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1177.704133] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Creating directory with path [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1177.707698] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0b9a7d5-3ee5-45d1-9c1b-5fb7406fcdfe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.714386] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834516, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.717012] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Created directory with path [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1177.717242] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69/OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69.vmdk to [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk. {{(pid=68638) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1177.717486] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ad3154d1-d39d-4fc6-8b51-13ced3532e67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.723603] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1177.723603] env[68638]: value = "task-2834523" [ 1177.723603] env[68638]: _type = "Task" [ 1177.723603] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.730909] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.818488] env[68638]: INFO nova.compute.manager [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Took 16.65 seconds to build instance. [ 1177.854913] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834518, 'name': PowerOffVM_Task, 'duration_secs': 0.28142} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.855238] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1177.855477] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1177.855696] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f634f40-affa-4872-9ba8-71622a85ba41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.934705] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1177.934945] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1177.935158] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore1] cc2e9758-45ee-4e94-ad74-ba7d6c85f06d {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.935451] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf7b0727-bc01-4cc5-8f48-2e18ffb641b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.941796] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1177.941796] env[68638]: value = "task-2834525" [ 1177.941796] env[68638]: _type = "Task" [ 1177.941796] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.951084] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.013281] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.013483] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.013665] env[68638]: DEBUG nova.network.neutron [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1178.013855] env[68638]: DEBUG nova.objects.instance [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'info_cache' on Instance uuid 00a22fef-5d10-4413-a9aa-070a1a863cdd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1178.136168] env[68638]: DEBUG oslo_vmware.api [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166936} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.136241] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.136430] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1178.136606] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1178.136788] env[68638]: INFO nova.compute.manager [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1178.137052] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1178.137472] env[68638]: DEBUG nova.compute.manager [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1178.137472] env[68638]: DEBUG nova.network.neutron [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1178.212771] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834516, 'name': ReconfigVM_Task, 'duration_secs': 1.186878} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.213155] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1178.234815] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.320595] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a4091796-f233-46de-97fa-733d97e6d612 tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.162s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.320887] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.083s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.321105] env[68638]: INFO nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] During sync_power_state the instance has a pending task (networking). Skip. 
[ 1178.321283] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.335426] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.340173] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.455213] env[68638]: DEBUG oslo_vmware.api [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308006} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.455213] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.455213] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1178.455835] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1178.455835] env[68638]: INFO nova.compute.manager [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1178.455835] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1178.456512] env[68638]: DEBUG nova.compute.manager [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1178.456512] env[68638]: DEBUG nova.network.neutron [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1178.720430] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.720779] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.720886] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.721106] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.721304] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.721443] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.721645] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.721802] env[68638]: DEBUG nova.virt.hardware [None 
req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.721969] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.724959] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.725208] env[68638]: DEBUG nova.virt.hardware [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.733205] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfiguring VM instance instance-00000074 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1178.733637] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95091b33-bdb4-4506-af27-b6ae08ffcc59 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.768855] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.770445] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1178.770445] env[68638]: value = "task-2834526" [ 1178.770445] env[68638]: _type = "Task" [ 1178.770445] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.786254] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834526, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.825449] env[68638]: DEBUG nova.compute.manager [req-755c755b-9d9b-4e11-b5b6-449002de4824 req-1c46dcf9-38ea-44a2-a05f-0ad24ff6d809 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Received event network-vif-deleted-acf48d4a-b19e-47d9-a807-d221c4f0fd05 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1178.825669] env[68638]: INFO nova.compute.manager [req-755c755b-9d9b-4e11-b5b6-449002de4824 req-1c46dcf9-38ea-44a2-a05f-0ad24ff6d809 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Neutron deleted interface acf48d4a-b19e-47d9-a807-d221c4f0fd05; detaching it from the instance and deleting it from the info cache [ 1178.825778] env[68638]: DEBUG nova.network.neutron [req-755c755b-9d9b-4e11-b5b6-449002de4824 req-1c46dcf9-38ea-44a2-a05f-0ad24ff6d809 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.108975] env[68638]: DEBUG nova.network.neutron [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.249925] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.282247] env[68638]: DEBUG nova.compute.manager [req-add5d5da-e034-4888-8001-2c66f5eb6a2c req-d216c432-3ae6-42f7-af0e-bbf4b3c813b0 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Received event network-vif-deleted-f197b79d-c838-421c-930b-a1d1d5f1b89c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1179.282247] env[68638]: INFO nova.compute.manager [req-add5d5da-e034-4888-8001-2c66f5eb6a2c req-d216c432-3ae6-42f7-af0e-bbf4b3c813b0 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Neutron deleted interface f197b79d-c838-421c-930b-a1d1d5f1b89c; detaching it from the instance and deleting it from the info cache [ 1179.282247] env[68638]: DEBUG nova.network.neutron [req-add5d5da-e034-4888-8001-2c66f5eb6a2c req-d216c432-3ae6-42f7-af0e-bbf4b3c813b0 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.291843] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834526, 'name': ReconfigVM_Task, 'duration_secs': 0.403681} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.292845] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfigured VM instance instance-00000074 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1179.294186] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dbc90c-e02c-41ef-b1aa-0f6ee3ef05ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.298415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.298623] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.298984] env[68638]: INFO nova.compute.manager [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Shelving [ 1179.325147] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1179.326411] env[68638]: DEBUG nova.network.neutron [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [{"id": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "address": "fa:16:3e:d6:46:73", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1a8c37a-0e", "ovs_interfaceid": "b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.328697] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f2e34aa-6fa9-4314-bf2c-5faa2adf4ac4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.342908] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.343115] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4936b581-0b1a-48b5-a5b5-12bf594a53a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.353822] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1af7a1-8be1-445a-ac03-d6a0ea927acf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.369092] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1179.369092] env[68638]: value = "task-2834527" [ 1179.369092] env[68638]: _type = "Task" [ 1179.369092] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.380672] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834527, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.392179] env[68638]: DEBUG nova.compute.manager [req-755c755b-9d9b-4e11-b5b6-449002de4824 req-1c46dcf9-38ea-44a2-a05f-0ad24ff6d809 service nova] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Detach interface failed, port_id=acf48d4a-b19e-47d9-a807-d221c4f0fd05, reason: Instance ace44b04-6dcf-4845-af4e-b28ddeebe60e could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1179.597492] env[68638]: DEBUG nova.network.neutron [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.611775] env[68638]: INFO nova.compute.manager [-] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Took 1.47 seconds to deallocate network for instance. 
[ 1179.744571] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.786061] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4425ef2-5c82-4a7f-875c-f14c6b3a2b72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.796146] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661db66e-c8a0-44ad-af26-4ca4dbdbdfee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.839473] env[68638]: DEBUG nova.compute.manager [req-add5d5da-e034-4888-8001-2c66f5eb6a2c req-d216c432-3ae6-42f7-af0e-bbf4b3c813b0 service nova] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Detach interface failed, port_id=f197b79d-c838-421c-930b-a1d1d5f1b89c, reason: Instance cc2e9758-45ee-4e94-ad74-ba7d6c85f06d could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1179.842572] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-00a22fef-5d10-4413-a9aa-070a1a863cdd" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.842839] env[68638]: DEBUG nova.objects.instance [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'migration_context' on Instance uuid 00a22fef-5d10-4413-a9aa-070a1a863cdd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.879607] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834527, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.101587] env[68638]: INFO nova.compute.manager [-] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Took 1.65 seconds to deallocate network for instance. 
[ 1180.119131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.119473] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.120054] env[68638]: DEBUG nova.objects.instance [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'resources' on Instance uuid ace44b04-6dcf-4845-af4e-b28ddeebe60e {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.246300] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834523, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.435231} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.246534] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69/OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69.vmdk to [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk. [ 1180.246733] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Cleaning up location [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1180.246901] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7b868d98-0f9b-4bd2-9a4a-edadf8f9cb69 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.247192] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7280162-a3e1-4951-be27-0cfe407b3027 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.253792] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1180.253792] env[68638]: value = "task-2834529" [ 1180.253792] env[68638]: _type = "Task" [ 1180.253792] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.262336] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.321665] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1180.321997] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70ae1bfe-8272-4ca9-bd48-6330a3e308f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.328631] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1180.328631] env[68638]: value = "task-2834530" [ 1180.328631] env[68638]: _type = "Task" [ 1180.328631] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.336642] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834530, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.340240] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.340444] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.345461] env[68638]: DEBUG nova.objects.base [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Object Instance<00a22fef-5d10-4413-a9aa-070a1a863cdd> lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1180.346699] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e663b8-adcc-4513-8343-05e4990e5d0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.365314] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ccc9f69-00cd-4540-88dd-d803a1f81750 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.370432] env[68638]: DEBUG oslo_vmware.api [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1180.370432] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f13d26-b038-df10-989a-bbf999fe8c4e" [ 1180.370432] env[68638]: _type = "Task" [ 1180.370432] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.380655] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834527, 'name': ReconfigVM_Task, 'duration_secs': 0.921013} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.383872] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f/2e788c4c-f6d1-4001-9389-1068887d205f.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1180.384507] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1180.388167] env[68638]: DEBUG oslo_vmware.api [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f13d26-b038-df10-989a-bbf999fe8c4e, 'name': SearchDatastore_Task, 'duration_secs': 0.006831} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.388588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.608808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.762771] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035792} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.764960] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.765159] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.765432] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk to [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1180.765856] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bc290a7-5ac6-4f89-8cdc-ee5b87abdbd4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.772563] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1180.772563] env[68638]: value = "task-2834531" [ 1180.772563] env[68638]: _type = "Task" [ 1180.772563] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.782491] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.787734] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714ac8c4-a0fc-41ac-90c6-363e85d2efa2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.794680] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c16bbc-07fa-4a5d-9cd4-5978cbe7ca99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.825613] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05333e02-c68c-41e2-963f-f5b4f646e139 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.835539] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb082bb-ab9b-478b-a7d4-9b9cc4667506 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.844055] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834530, 'name': PowerOffVM_Task, 'duration_secs': 0.229739} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.852326] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.852675] env[68638]: DEBUG nova.compute.provider_tree [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.854609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c3d28f-b040-4a5d-b050-b007291f9834 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.872951] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0d6184-50ae-49de-9d7a-efbcf3583f92 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.890425] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58516a1f-509c-41be-b75c-037212166b67 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.908748] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c9e626-a376-43a6-b9d1-d7b6d66c23b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.926782] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None 
req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1181.283925] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.305138] env[68638]: DEBUG nova.compute.manager [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Received event network-changed-da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1181.305438] env[68638]: DEBUG nova.compute.manager [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Refreshing instance network info cache due to event network-changed-da4e63a0-6fb2-436b-b720-8c1e1b21decf. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1181.305566] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] Acquiring lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.305699] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] Acquired lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.305857] env[68638]: DEBUG nova.network.neutron [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Refreshing network info cache for port da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1181.340012] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.340201] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1181.340430] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.359146] env[68638]: DEBUG nova.scheduler.client.report [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.383451] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1181.384501] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d67e713e-09b3-4775-a395-225b69d7449d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.392236] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1181.392236] env[68638]: value = "task-2834532" [ 1181.392236] env[68638]: _type = "Task" [ 1181.392236] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.401083] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834532, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.468050] env[68638]: DEBUG nova.network.neutron [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Port c9f8dd22-b056-4864-91c0-671a170e81bd binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1181.787407] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.844153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.866166] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.869082] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.480s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.886407] env[68638]: INFO nova.scheduler.client.report [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted allocations for instance ace44b04-6dcf-4845-af4e-b28ddeebe60e [ 1181.902713] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834532, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.055548] env[68638]: DEBUG nova.network.neutron [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updated VIF entry in instance network info cache for port da4e63a0-6fb2-436b-b720-8c1e1b21decf. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1182.055923] env[68638]: DEBUG nova.network.neutron [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating instance_info_cache with network_info: [{"id": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "address": "fa:16:3e:0d:6b:3d", "network": {"id": "5de0e424-8bf1-4515-8c49-06607ad85c61", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1760008184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ccc24eaf6cf74d539558c0a736e18c3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda4e63a0-6f", "ovs_interfaceid": "da4e63a0-6fb2-436b-b720-8c1e1b21decf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.283975] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.397366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-74157ecc-cb87-4d83-be3e-0beeb7b5cebf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "ace44b04-6dcf-4845-af4e-b28ddeebe60e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.903s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.408586] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834532, 'name': CreateSnapshot_Task, 'duration_secs': 0.901431} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.411937] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1182.413060] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0db746-422d-4587-8bc4-61ee64578fd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.490515] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.490753] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.490927] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.558721] env[68638]: DEBUG oslo_concurrency.lockutils [req-ef725376-bcf4-4a2a-aab3-de9de269c87e req-e4a3f780-9242-4834-b150-91f74682dd46 service nova] Releasing lock "refresh_cache-e2e74700-aa83-484a-a61f-9f98a6019fdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.610524] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5931dff-306f-489b-a292-794ed10a5634 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.619698] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740fbdef-70d1-4990-940d-d1562a6321ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.653035] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee58e19c-abe9-405e-8294-d9dd03f729e8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.661234] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4eed18-7b7c-4f54-a4e9-f3ca5e2ce83b {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.675569] env[68638]: DEBUG nova.compute.provider_tree [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.783817] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.936676] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1182.937052] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b218f948-d2c4-4802-a728-126496b52015 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.945956] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1182.945956] env[68638]: value = "task-2834534" [ 1182.945956] env[68638]: _type = "Task" [ 1182.945956] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.956567] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834534, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.179583] env[68638]: DEBUG nova.scheduler.client.report [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.284136] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834531, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.33198} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.284424] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a04f22d5-e7b9-474d-8313-7a4349baeb1c/a04f22d5-e7b9-474d-8313-7a4349baeb1c.vmdk to [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.285243] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0020432-5f12-4cf1-b3e0-f2386f75a329 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.307644] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1183.307936] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb56d12d-b397-488e-8b45-4ee3ce7384f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.327184] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1183.327184] env[68638]: value = "task-2834535" [ 1183.327184] env[68638]: _type = "Task" [ 1183.327184] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.334968] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834535, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.456714] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834534, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.529430] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.529680] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.529835] env[68638]: DEBUG nova.network.neutron [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.838249] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834535, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.956980] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834534, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.190492] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.322s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.194756] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.585s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.194756] env[68638]: DEBUG nova.objects.instance [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'resources' on Instance uuid cc2e9758-45ee-4e94-ad74-ba7d6c85f06d {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.297597] env[68638]: DEBUG nova.network.neutron [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.339161] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834535, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.461043] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834534, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.753458] env[68638]: INFO nova.scheduler.client.report [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocation for migration 3233046a-fd8f-452a-bd4e-9ee9e4fa379f [ 1184.800525] env[68638]: DEBUG oslo_concurrency.lockutils [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.840602] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834535, 'name': ReconfigVM_Task, 'duration_secs': 1.085002} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.840803] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Reconfigured VM instance instance-00000069 to attach disk [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12/a576ba6f-5e3b-4408-b95d-2084a072ec12.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1184.842041] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b7aac02-3b77-4d11-9ff8-db309d4539ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.848630] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1184.848630] env[68638]: value = "task-2834536" [ 1184.848630] env[68638]: _type = "Task" [ 1184.848630] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.858862] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834536, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.903216] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3674763b-104f-4286-8d71-9647599c9088 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.910970] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ffb523-0b47-4fa2-85f7-791699822985 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.942722] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62bdf5d-2068-4012-ab24-0899d813e668 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.950398] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9b740a-7a27-4986-93fe-ca479aa0f344 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.967595] env[68638]: DEBUG nova.compute.provider_tree [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.971841] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834534, 'name': CloneVM_Task, 'duration_secs': 1.584745} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.972324] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Created linked-clone VM from snapshot [ 1184.973055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad066dd-e866-42a9-9fde-a462ac6e0c19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.980799] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Uploading image 629cabe9-6521-40cd-a0d0-8bc57ab4999b {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1185.710657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5161a9bf-8ac6-40ac-9224-ee3b110106c2 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.290s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.714745] env[68638]: DEBUG nova.scheduler.client.report [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.732624] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834536, 'name': Rename_Task, 'duration_secs': 0.474098} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.733881] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1185.733881] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56136fc9-073b-49a5-ae07-a819dce32058 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.740725] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1185.740725] env[68638]: value = "vm-570051" [ 1185.740725] env[68638]: _type = "VirtualMachine" [ 1185.740725] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1185.741726] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6d54878d-e7ac-4026-8fdb-4e07a08ffa86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.745419] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1185.745419] env[68638]: value = "task-2834537" [ 1185.745419] env[68638]: _type = "Task" [ 1185.745419] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.750848] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7518f49b-7313-46b6-9448-c40dff07d1d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.754454] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease: (returnval){ [ 1185.754454] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e030d8-e73b-57ce-4666-e6de5adbcd48" [ 1185.754454] env[68638]: _type = "HttpNfcLease" [ 1185.754454] env[68638]: } obtained for exporting VM: (result){ [ 1185.754454] env[68638]: value = "vm-570051" [ 1185.754454] env[68638]: _type = "VirtualMachine" [ 1185.754454] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1185.754454] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the lease: (returnval){ [ 1185.754454] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e030d8-e73b-57ce-4666-e6de5adbcd48" [ 1185.754454] env[68638]: _type = "HttpNfcLease" [ 1185.754454] env[68638]: } to be ready. 
{{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1185.774465] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834537, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.775828] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d5d7084-a3bc-4948-843a-f899fb6eba27 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.780146] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1185.780146] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e030d8-e73b-57ce-4666-e6de5adbcd48" [ 1185.780146] env[68638]: _type = "HttpNfcLease" [ 1185.780146] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1185.781023] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1185.781023] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e030d8-e73b-57ce-4666-e6de5adbcd48" [ 1185.781023] env[68638]: _type = "HttpNfcLease" [ 1185.781023] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1185.781792] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed83a094-e051-4c82-b7f8-60d631693291 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.786500] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1185.795190] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1185.795473] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk for reading. 
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1185.888113] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a466e958-3432-4d43-be0b-d0a0e531de04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.223795] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.224212] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.224444] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.224788] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.224905] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.227394] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.033s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.229779] env[68638]: INFO nova.compute.manager [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Terminating instance [ 1186.231521] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.387s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.231950] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.232748] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1186.234758] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c097ae99-beb3-47e5-b84d-ba7a30f1950f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.245823] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7668eae-6bca-4847-9703-fe32c3586de3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.258198] env[68638]: INFO nova.scheduler.client.report [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocations for instance cc2e9758-45ee-4e94-ad74-ba7d6c85f06d [ 1186.275055] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f2f862-5753-4b0b-98c2-cc7f654f04b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.284127] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834537, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.290049] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dec3dd-9074-4c00-aae0-de938c122e4a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.294799] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1186.295480] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad414e5f-f628-4fe9-b199-da446268b5e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.325039] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178909MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1186.325223] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.325493] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.329907] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1186.329907] env[68638]: value = "task-2834539" [ 1186.329907] env[68638]: _type = "Task" [ 1186.329907] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.339401] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834539, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.364145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.364620] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.739209] env[68638]: DEBUG nova.compute.manager [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1186.739596] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1186.740522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0238c8-0637-4dd9-adbd-1d23f0640f69 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.749031] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.749414] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ed71e08-2ad2-46f6-8def-0b129596e3d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.756832] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1186.756832] env[68638]: value = "task-2834541" [ 1186.756832] env[68638]: _type = "Task" [ 1186.756832] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.760086] env[68638]: DEBUG oslo_vmware.api [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834537, 'name': PowerOnVM_Task, 'duration_secs': 0.582809} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.765306] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.772396] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.780244] env[68638]: DEBUG oslo_concurrency.lockutils [None req-34a4649a-a256-4571-97ca-443a6557095b tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "cc2e9758-45ee-4e94-ad74-ba7d6c85f06d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.962s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.848280] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834539, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.867693] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1186.896641] env[68638]: DEBUG nova.compute.manager [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1186.898731] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3441b23-dc85-47d5-9fe3-d4e976376fa5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.270441] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834541, 'name': PowerOffVM_Task, 'duration_secs': 0.25155} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.270858] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.271117] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1187.271466] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9e64244-5ffa-4afb-a436-2f6e9798d52a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.343230] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Applying migration context for instance 2e788c4c-f6d1-4001-9389-1068887d205f as it has an incoming, in-progress migration 1ae08ae0-f8a3-4c61-96fa-4b16d7ca8f4d. Migration status is post-migrating {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1187.344683] env[68638]: INFO nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating resource usage from migration 1ae08ae0-f8a3-4c61-96fa-4b16d7ca8f4d [ 1187.347117] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1187.348032] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1187.348032] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore2] 00a22fef-5d10-4413-a9aa-070a1a863cdd {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1187.348624] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c1f0845-f81b-4eb5-8ab9-c7454d09e85c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.353929] env[68638]: DEBUG oslo_vmware.api [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834539, 'name': PowerOnVM_Task, 'duration_secs': 0.587383} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.354899] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1187.355544] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-82bec191-9a48-4098-b096-79c08beb8dd7 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance '2e788c4c-f6d1-4001-9389-1068887d205f' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1187.363039] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1187.363039] env[68638]: value = "task-2834543" [ 1187.363039] env[68638]: _type = "Task" [ 1187.363039] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.369349] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.370442] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.370667] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.370889] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371020] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7d99d946-f2df-4d31-911f-ac479849b901 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371138] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371250] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 25c35c36-71c9-48cd-b7e4-6293eef890e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371360] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 00a22fef-5d10-4413-a9aa-070a1a863cdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371472] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e2e74700-aa83-484a-a61f-9f98a6019fdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371581] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance a576ba6f-5e3b-4408-b95d-2084a072ec12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.371689] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Migration 1ae08ae0-f8a3-4c61-96fa-4b16d7ca8f4d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1187.371794] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2e788c4c-f6d1-4001-9389-1068887d205f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1187.389314] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.420087] env[68638]: DEBUG oslo_concurrency.lockutils [None req-625172ba-0d97-4484-8155-dc7f0148a7a5 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.994s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.494351] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "7d99d946-f2df-4d31-911f-ac479849b901" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.494678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.494942] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "7d99d946-f2df-4d31-911f-ac479849b901-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.495313] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.495560] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.498039] env[68638]: INFO nova.compute.manager [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 
tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Terminating instance [ 1187.875054] env[68638]: DEBUG oslo_vmware.api [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400817} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.875339] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.876035] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1187.876035] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1187.876035] env[68638]: INFO nova.compute.manager [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1187.876252] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1187.876995] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance d7fd30c6-3e0b-4564-9369-f29dc59a4d74 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1187.877144] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1187.877320] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1187.879689] env[68638]: DEBUG nova.compute.manager [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1187.879823] env[68638]: DEBUG nova.network.neutron [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1188.002893] env[68638]: DEBUG nova.compute.manager [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1188.003178] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.004772] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f6267d-ed6f-495f-8cd9-d13df12db17e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.014705] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.014944] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e66ab15-b780-42c1-a989-80f127ff032b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.024330] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1188.024330] env[68638]: value = "task-2834544" [ 1188.024330] env[68638]: _type = "Task" [ 1188.024330] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.036679] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.085858] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b258571-2578-4dcb-a99a-299c19ad9fb8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.095187] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9622b55f-764f-4d0c-838c-521d01b167db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.127607] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131fd7ca-e315-48aa-9633-6c8356f85adc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.135787] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1555f79a-19b7-457b-befc-1491d2d22e18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.153369] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.319968] env[68638]: DEBUG nova.compute.manager [req-a601630f-c6cf-48f6-9edf-ccf8bfe571b5 req-0a5f7523-db9d-4dc2-a99b-f1d67f30633b service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Received event network-vif-deleted-b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1188.320265] env[68638]: INFO nova.compute.manager [req-a601630f-c6cf-48f6-9edf-ccf8bfe571b5 req-0a5f7523-db9d-4dc2-a99b-f1d67f30633b service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Neutron deleted interface b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8; detaching it from the instance and deleting it from the info cache [ 1188.320430] env[68638]: DEBUG nova.network.neutron [req-a601630f-c6cf-48f6-9edf-ccf8bfe571b5 req-0a5f7523-db9d-4dc2-a99b-f1d67f30633b service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.452968] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b908d0fd-846e-4ecb-9f54-45334a675ef1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.458963] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Suspending the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1188.459245] env[68638]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5c3f5a1d-e939-4986-bfd3-d1fb34ccbe2f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.465539] env[68638]: DEBUG oslo_vmware.api [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1188.465539] env[68638]: value = "task-2834545" [ 1188.465539] env[68638]: _type = "Task" [ 1188.465539] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.473130] env[68638]: DEBUG oslo_vmware.api [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834545, 'name': SuspendVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.534137] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834544, 'name': PowerOffVM_Task, 'duration_secs': 0.26081} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.535533] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1188.535709] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1188.536260] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6dc3a26-a707-49e3-a66f-fb11f14053cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.606303] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1188.606575] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1188.606803] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleting the datastore file [datastore1] 7d99d946-f2df-4d31-911f-ac479849b901 
{{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1188.607251] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14fd311c-4860-4e18-b50c-7f53eb48ad90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.613666] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1188.613666] env[68638]: value = "task-2834547" [ 1188.613666] env[68638]: _type = "Task" [ 1188.613666] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.622696] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.656070] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.679194] env[68638]: DEBUG nova.network.neutron [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.822728] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4e5fb6b-d320-499f-81ce-d2e984214368 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.831984] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39d4a5b-ee55-411d-80c4-f3b857618736 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.862689] env[68638]: DEBUG nova.compute.manager [req-a601630f-c6cf-48f6-9edf-ccf8bfe571b5 req-0a5f7523-db9d-4dc2-a99b-f1d67f30633b service nova] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Detach interface failed, port_id=b1a8c37a-0ea6-47fb-b2ac-16b0e4ce85d8, reason: Instance 00a22fef-5d10-4413-a9aa-070a1a863cdd could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1188.975776] env[68638]: DEBUG oslo_vmware.api [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834545, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.128986] env[68638]: DEBUG oslo_vmware.api [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28825} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.129486] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1189.129809] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1189.130128] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1189.130440] env[68638]: INFO nova.compute.manager [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1189.131042] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1189.131271] env[68638]: DEBUG nova.compute.manager [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1189.131335] env[68638]: DEBUG nova.network.neutron [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1189.162658] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1189.162881] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.837s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.163195] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.774s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.164640] env[68638]: INFO nova.compute.claims [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.184136] env[68638]: INFO nova.compute.manager [-] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Took 1.30 seconds to deallocate network for instance. [ 1189.478046] env[68638]: DEBUG oslo_vmware.api [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834545, 'name': SuspendVM_Task, 'duration_secs': 0.645014} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.478504] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Suspended the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1189.478504] env[68638]: DEBUG nova.compute.manager [None req-f58c6e56-d742-4b43-89ad-8ec7dbd0693f tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1189.479316] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e50f62-b0ec-458b-96d4-50382f73f4f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.692381] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.911477] env[68638]: DEBUG nova.network.neutron [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.995509] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.995859] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.996164] env[68638]: DEBUG nova.compute.manager [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Going to confirm migration 6 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1190.111655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.111930] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 
tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.112151] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.112341] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.112512] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.114565] env[68638]: INFO nova.compute.manager [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Terminating instance [ 1190.341900] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57becfd0-f0d3-4db7-bd4f-1acc81603bd3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.349871] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751f855e-add7-45f2-863c-3333a15536a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.381967] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f144e83a-f4c9-4cff-a27d-5332ec7acd36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.390100] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb89eeec-3d9f-4e01-a427-7253c3e513dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.404471] env[68638]: DEBUG nova.compute.provider_tree [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.414142] env[68638]: INFO 
nova.compute.manager [-] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Took 1.28 seconds to deallocate network for instance. [ 1190.460472] env[68638]: DEBUG nova.compute.manager [req-e4d96ef9-fe7b-48d6-81ed-187426298a76 req-12049480-80db-4eef-b3a1-5964cc35f98f service nova] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Received event network-vif-deleted-5b775bb7-0c12-417b-8cd1-bc0089e8658c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1190.618057] env[68638]: DEBUG nova.compute.manager [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1190.618441] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1190.619231] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be577591-f1f9-4923-9215-246c507fdc63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.627134] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1190.627433] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce93e17a-baa2-448e-9cf9-ef66685c7b9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.634332] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1190.634332] env[68638]: value = "task-2834548" [ 1190.634332] env[68638]: _type = "Task" [ 1190.634332] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.642918] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834548, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.789782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.789989] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquired lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.790183] env[68638]: DEBUG nova.network.neutron [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.790373] env[68638]: DEBUG nova.objects.instance [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'info_cache' on Instance uuid 2e788c4c-f6d1-4001-9389-1068887d205f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.907952] env[68638]: DEBUG nova.scheduler.client.report [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.920356] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.144907] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834548, 'name': PowerOffVM_Task, 'duration_secs': 0.30099} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.145307] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1191.145512] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1191.145784] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45b185da-348f-4014-9125-979aa931cdc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.215154] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1191.215154] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1191.215568] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleting the datastore file [datastore1] 423af2cc-4dea-445f-a01c-6d4d57c3f0de {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1191.215627] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47b71866-1fe7-483e-b376-c60f259d3be8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.222947] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for the task: (returnval){ [ 1191.222947] env[68638]: value = "task-2834550" [ 1191.222947] env[68638]: _type = "Task" [ 1191.222947] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.231096] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.414025] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.251s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.414599] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.417346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.725s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.417647] env[68638]: DEBUG nova.objects.instance [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'resources' on Instance uuid 00a22fef-5d10-4413-a9aa-070a1a863cdd {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.469667] env[68638]: INFO nova.compute.manager [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Resuming [ 1191.470342] env[68638]: DEBUG nova.objects.instance [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'flavor' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.733150] env[68638]: DEBUG oslo_vmware.api [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Task: {'id': task-2834550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217396} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.733558] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1191.733678] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1191.733886] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1191.734115] env[68638]: INFO nova.compute.manager [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1191.734398] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1191.734627] env[68638]: DEBUG nova.compute.manager [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1191.734756] env[68638]: DEBUG nova.network.neutron [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1191.922392] env[68638]: DEBUG nova.compute.utils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.927069] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1191.927317] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1192.015522] env[68638]: DEBUG nova.network.neutron [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [{"id": "c9f8dd22-b056-4864-91c0-671a170e81bd", "address": "fa:16:3e:d1:2d:01", "network": {"id": "4ccf9e56-9fb3-48ff-bf2d-a35faedb905b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1191830363-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ae89c3992e04141bf24be9d9e84e302", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9f8dd22-b0", "ovs_interfaceid": "c9f8dd22-b056-4864-91c0-671a170e81bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.017732] env[68638]: DEBUG nova.policy [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7569a0fd95c644d38ef18de41870bde4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fdd5447a0546b7b0fe2ed9ea0efc73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1192.112868] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3ad43d-06ae-4c86-8f30-2929fe54b3e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.120701] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8d11d4-d957-4204-924e-fcd2503c93ca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.150721] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5f729a-3eaf-4f61-9695-1e3fd86bc457 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.157931] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f002fd87-f950-4a98-ad31-c1fcc69e90c7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.173483] env[68638]: DEBUG nova.compute.provider_tree [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.334985] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Successfully created port: dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1192.428060] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.512576] env[68638]: DEBUG nova.network.neutron [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.521359] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Releasing lock "refresh_cache-2e788c4c-f6d1-4001-9389-1068887d205f" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.521698] env[68638]: DEBUG nova.objects.instance [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lazy-loading 'migration_context' on Instance uuid 2e788c4c-f6d1-4001-9389-1068887d205f {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.585302] env[68638]: DEBUG nova.compute.manager [req-46bcfa2c-3307-42af-a40f-7cc28b512e8e req-8edd084c-16b2-4da7-8eb0-b7a94381c1a6 service nova] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Received event network-vif-deleted-83c2852d-0228-4c4e-b754-0dc81d6b8a11 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1192.676316] env[68638]: DEBUG nova.scheduler.client.report [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.986356] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.986744] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquired lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.986744] env[68638]: DEBUG nova.network.neutron [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1193.013332] env[68638]: INFO nova.compute.manager [-] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Took 1.28 seconds to deallocate network for instance. [ 1193.025226] env[68638]: DEBUG nova.objects.base [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Object Instance<2e788c4c-f6d1-4001-9389-1068887d205f> lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1193.026239] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868699bb-76c9-456f-93c3-dbe3a902b81e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.048251] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc38fd92-2a48-47a3-b77d-4476d1518067 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.054306] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1193.054306] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5267f5d3-f3a1-9cb6-0869-179ec4bc5243" [ 1193.054306] env[68638]: _type = "Task" [ 1193.054306] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.062605] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5267f5d3-f3a1-9cb6-0869-179ec4bc5243, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.181496] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.184321] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.264s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.184587] env[68638]: DEBUG nova.objects.instance [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'resources' on Instance uuid 7d99d946-f2df-4d31-911f-ac479849b901 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.205206] env[68638]: INFO nova.scheduler.client.report [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocations for instance 00a22fef-5d10-4413-a9aa-070a1a863cdd [ 1193.370014] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1193.370986] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3d480a-bcaa-4739-9b3c-29ec56bbc048 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.377272] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1193.377478] env[68638]: ERROR oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk due to incomplete transfer. 
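[editor's aside, not part of the captured log] The records above and below are dominated by oslo.vmware's request/poll cycle: each "Invoking <method> with opID=oslo.vmware-..." line is a SOAP call issued through the shared VMwareAPISession, and each "Waiting for the task" / "Task: {...} progress is N%" / "completed successfully" line comes from its task-polling helper (_poll_task / wait_for_task). A minimal sketch of that calling pattern, assuming a hypothetical, already-created oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed-object reference `vm_ref` (neither taken from this log), would look roughly like:

    # Illustrative sketch only -- not Nova's code. `session` is assumed to be an
    # existing oslo_vmware.api.VMwareAPISession; `vm_ref` a VM moref obtained elsewhere.
    def power_off_and_wait(session, vm_ref):
        # invoke_api() issues the SOAP call (the "Invoking
        # VirtualMachine.PowerOffVM_Task" entries above) and returns a task
        # reference without blocking.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task and is what produces the periodic
        # "Task: {...} progress is N%" / "completed successfully" lines; it
        # raises an oslo_vmware exception if the task finishes in error.
        return session.wait_for_task(task)

The Nova vmwareapi driver wraps these calls in its own helpers (e.g. vm_util.power_off_instance and ds_util.file_delete, as the file paths in the records show), so this sketch is only meant to make the repeated polling entries easier to read.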
[ 1193.377698] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-936d83b9-f7fa-4552-86ad-751b13a66e09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.384925] env[68638]: DEBUG oslo_vmware.rw_handles [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52677943-80a6-5a01-a9d0-6c8fdf885f12/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1193.385129] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Uploaded image 629cabe9-6521-40cd-a0d0-8bc57ab4999b to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1193.387465] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1193.387686] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-06ccaadf-7a02-4e6b-b61e-2c0257fe65a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.393443] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1193.393443] env[68638]: value = "task-2834551" [ 1193.393443] env[68638]: _type = "Task" [ 1193.393443] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.401061] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834551, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.437535] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.460895] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.461156] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.461318] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.461507] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.461654] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.461799] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.462050] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.462239] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.462415] env[68638]: DEBUG 
nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.462580] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.462754] env[68638]: DEBUG nova.virt.hardware [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.463623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a90c452-eb41-4bd4-add2-1a82c95fab6e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.471432] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aed961-8179-4ea6-977e-8db61c395b99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.519349] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.565986] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5267f5d3-f3a1-9cb6-0869-179ec4bc5243, 'name': SearchDatastore_Task, 'duration_secs': 0.009004} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.566319] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.700261] env[68638]: DEBUG nova.network.neutron [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [{"id": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "address": "fa:16:3e:68:8a:2b", "network": {"id": "72c025a9-b352-4718-9ffb-469abb0f7099", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1791072145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8938cbcafe93492e8f53613d992790bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2725817f-dd", "ovs_interfaceid": "2725817f-dd0e-4f09-ba4d-70f48e578f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.715532] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d277fbcb-9439-4a0f-b098-70658f33f1d6 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "00a22fef-5d10-4413-a9aa-070a1a863cdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.491s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.783617] env[68638]: DEBUG nova.compute.manager [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Received event network-vif-plugged-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1193.783835] env[68638]: DEBUG oslo_concurrency.lockutils [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] Acquiring lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.784099] env[68638]: DEBUG oslo_concurrency.lockutils [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.784280] env[68638]: DEBUG oslo_concurrency.lockutils [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.784507] env[68638]: DEBUG nova.compute.manager [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] No waiting events found dispatching network-vif-plugged-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.784622] env[68638]: WARNING nova.compute.manager [req-2784f9e0-5b42-4a23-91fc-c9b037203c03 req-13f0e148-61b0-4361-9608-a561714dc85d service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Received unexpected event network-vif-plugged-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 for instance with vm_state building and task_state spawning. [ 1193.842727] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761a3e88-e51e-4b3b-b31a-5dabf0e8663d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.850052] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216be2c9-a123-4db5-b94d-0bcb1689db95 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.881194] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5c1609-e802-4bb1-bf9f-c47f1c97548b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.887167] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f73c46-cb0a-4ccb-a326-07f4c144b87f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.903904] env[68638]: DEBUG nova.compute.provider_tree [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.909527] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834551, 'name': Destroy_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.203414] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Releasing lock "refresh_cache-a576ba6f-5e3b-4408-b95d-2084a072ec12" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.205022] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a45b59-44a9-4ac8-a995-6640e57b1e87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.212214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Resuming the VM {{(pid=68638) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1194.212495] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f2d7f5e-ceaf-4e29-b33f-b8f2a2918308 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.219844] env[68638]: DEBUG oslo_vmware.api [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1194.219844] env[68638]: value = "task-2834552" [ 1194.219844] env[68638]: _type = "Task" [ 1194.219844] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.227920] env[68638]: DEBUG oslo_vmware.api [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.373158] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Successfully updated port: dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1194.403532] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834551, 'name': Destroy_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.406532] env[68638]: DEBUG nova.scheduler.client.report [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.613958] env[68638]: DEBUG nova.compute.manager [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Received event network-changed-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1194.614195] env[68638]: DEBUG nova.compute.manager [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Refreshing instance network info cache due to event network-changed-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1194.614409] env[68638]: DEBUG oslo_concurrency.lockutils [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] Acquiring lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.614557] env[68638]: DEBUG oslo_concurrency.lockutils [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] Acquired lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.614720] env[68638]: DEBUG nova.network.neutron [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Refreshing network info cache for port dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1194.731389] env[68638]: DEBUG oslo_vmware.api [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834552, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.875523] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.906024] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834551, 'name': Destroy_Task, 'duration_secs': 1.333902} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.906331] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Destroyed the VM [ 1194.906585] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1194.906882] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8592c7f0-d9b2-4a85-8c9b-dff385110f1a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.911748] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.915629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.396s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.915879] env[68638]: DEBUG nova.objects.instance [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lazy-loading 'resources' on Instance uuid 423af2cc-4dea-445f-a01c-6d4d57c3f0de {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1194.917155] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1194.917155] env[68638]: value = "task-2834553" [ 1194.917155] env[68638]: _type = "Task" [ 1194.917155] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.925644] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834553, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.930902] env[68638]: INFO nova.scheduler.client.report [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted allocations for instance 7d99d946-f2df-4d31-911f-ac479849b901 [ 1195.166701] env[68638]: DEBUG nova.network.neutron [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1195.233244] env[68638]: DEBUG oslo_vmware.api [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834552, 'name': PowerOnVM_Task, 'duration_secs': 0.513074} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.234336] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Resumed the VM {{(pid=68638) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1195.234336] env[68638]: DEBUG nova.compute.manager [None req-9fa3a5a0-cb10-4ae9-b02f-9909da106d9d tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1195.234527] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37eb23d-ff3e-46a4-9713-fcfa18eb87bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.272055] env[68638]: DEBUG nova.network.neutron [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.429440] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834553, 'name': RemoveSnapshot_Task, 'duration_secs': 0.34606} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.429581] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1195.429966] env[68638]: DEBUG nova.compute.manager [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1195.430764] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7c33ff-3996-44c1-a359-a6b68c076915 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.444741] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e296877b-9487-4c81-894b-036165bbc57a tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "7d99d946-f2df-4d31-911f-ac479849b901" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.949s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.560520] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d747ff-dcfb-4b0a-aece-d5e66574775c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.568011] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42363c4b-301d-4024-9b89-1bf9a5c4f7b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.597832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.598109] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.598322] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.598517] 
env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.598718] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.601094] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547b0842-0415-42bc-a20b-76ef6adf4d49 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.603938] env[68638]: INFO nova.compute.manager [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Terminating instance [ 1195.612296] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789e0fdd-14d7-4129-9a9d-e918b1c6ce2b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.626799] env[68638]: DEBUG nova.compute.provider_tree [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.775193] env[68638]: DEBUG oslo_concurrency.lockutils [req-898ae748-58db-4142-9af3-52e4c2816760 req-58496321-0f6e-4448-9268-2980bd26afca service nova] Releasing lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.775562] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.775720] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.949378] env[68638]: INFO nova.compute.manager [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Shelve offloading [ 1196.118107] env[68638]: 
DEBUG nova.compute.manager [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1196.118359] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.119792] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07f87b6-d1f2-47c5-b785-864dc5eba79d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.127870] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.128124] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dab04379-0d7f-4523-b8a8-30bb6c24d814 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.130213] env[68638]: DEBUG nova.scheduler.client.report [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.137907] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1196.137907] env[68638]: value = "task-2834554" [ 1196.137907] env[68638]: _type = "Task" [ 1196.137907] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.145702] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834554, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.311486] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1196.452845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.453522] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb30f2f6-ce0d-4da3-af47-3d0ab681e858 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.460782] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1196.460782] env[68638]: value = "task-2834555" [ 1196.460782] env[68638]: _type = "Task" [ 1196.460782] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.468471] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.486583] env[68638]: DEBUG nova.network.neutron [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Updating instance_info_cache with network_info: [{"id": "dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6", "address": "fa:16:3e:39:8b:ed", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfb906bd-6a", "ovs_interfaceid": "dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.635770] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.638319] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.072s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.648243] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834554, 'name': PowerOffVM_Task, 'duration_secs': 0.479522} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.648578] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.648673] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.648909] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d1da76b-da58-48a1-95c1-47a90373fa5f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.655495] env[68638]: INFO nova.scheduler.client.report [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Deleted allocations for instance 423af2cc-4dea-445f-a01c-6d4d57c3f0de [ 1196.717033] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.717033] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.717033] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleting the datastore file [datastore1] df2e066d-7c71-4aec-ab9b-a339a7ff21fb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.717033] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e7f66d9-1745-4403-bf42-26047c73d432 {{(pid=68638) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.720838] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for the task: (returnval){ [ 1196.720838] env[68638]: value = "task-2834557" [ 1196.720838] env[68638]: _type = "Task" [ 1196.720838] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.728209] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.971216] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1196.971460] env[68638]: DEBUG nova.compute.manager [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1196.972236] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1f23ff-8098-4ea6-9eef-1c757c59b4e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.977533] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.977708] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.977878] env[68638]: DEBUG nova.network.neutron [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.989182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "refresh_cache-d7fd30c6-3e0b-4564-9369-f29dc59a4d74" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.989444] env[68638]: DEBUG nova.compute.manager 
[None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance network_info: |[{"id": "dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6", "address": "fa:16:3e:39:8b:ed", "network": {"id": "e7719a30-81aa-48f1-a272-5246f78d9891", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1890376720-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fdd5447a0546b7b0fe2ed9ea0efc73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271a82f1-1d09-4ad3-9c15-07269bad114c", "external-id": "nsx-vlan-transportzone-441", "segmentation_id": 441, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfb906bd-6a", "ovs_interfaceid": "dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1196.990041] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:8b:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271a82f1-1d09-4ad3-9c15-07269bad114c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.997528] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1196.997943] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1196.998180] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7780827-b666-4807-8d3c-0bbc98e9b5a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.018119] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1197.018119] env[68638]: value = "task-2834558" [ 1197.018119] env[68638]: _type = "Task" [ 1197.018119] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.025192] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834558, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.165453] env[68638]: DEBUG oslo_concurrency.lockutils [None req-25b2f06b-562b-4346-ae59-a369cc44d7a7 tempest-ServerActionsTestOtherA-716625588 tempest-ServerActionsTestOtherA-716625588-project-member] Lock "423af2cc-4dea-445f-a01c-6d4d57c3f0de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.053s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.234132] env[68638]: DEBUG oslo_vmware.api [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Task: {'id': task-2834557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144906} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.234526] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.234814] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.235097] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.235371] env[68638]: INFO nova.compute.manager [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1197.235843] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.236052] env[68638]: DEBUG nova.compute.manager [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1197.236138] env[68638]: DEBUG nova.network.neutron [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.277346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dc213f-3825-435c-8db9-fab43bba7c18 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.286399] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a619325a-9105-4ec2-b4a7-ae621846a616 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.316073] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17cfe743-45f0-43c3-90c9-aced1375cb34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.323737] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457178c6-a983-4006-a475-4590dc726605 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.336863] env[68638]: DEBUG nova.compute.provider_tree [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.538160] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834558, 'name': CreateVM_Task, 'duration_secs': 0.316802} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.540930] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1197.550584] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.550694] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.551036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1197.551512] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff47193d-744a-4e71-babd-82b16b49a086 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.557294] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1197.557294] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f69825-b9e5-eece-14d7-ec5a908f0245" [ 1197.557294] env[68638]: _type = "Task" [ 1197.557294] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.566670] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f69825-b9e5-eece-14d7-ec5a908f0245, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.720501] env[68638]: DEBUG nova.network.neutron [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.775268] env[68638]: DEBUG nova.compute.manager [req-586665c7-f995-4b45-9b75-ed3e0835de32 req-9ceb49b5-f7bd-452d-9984-1a86db338682 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Received event network-vif-deleted-53ebdba3-fcaa-435f-a048-dd22fa9cc3b5 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1197.775564] env[68638]: INFO nova.compute.manager [req-586665c7-f995-4b45-9b75-ed3e0835de32 req-9ceb49b5-f7bd-452d-9984-1a86db338682 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Neutron deleted interface 53ebdba3-fcaa-435f-a048-dd22fa9cc3b5; detaching it from the instance and deleting it from the info cache [ 1197.775807] env[68638]: DEBUG nova.network.neutron [req-586665c7-f995-4b45-9b75-ed3e0835de32 req-9ceb49b5-f7bd-452d-9984-1a86db338682 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.840023] env[68638]: DEBUG nova.scheduler.client.report [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
1198.041224] env[68638]: DEBUG nova.network.neutron [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.068102] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f69825-b9e5-eece-14d7-ec5a908f0245, 'name': SearchDatastore_Task, 'duration_secs': 0.009547} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.068424] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.068677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1198.068916] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.069073] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.069263] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.069536] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d0c2aae-4c69-4e32-b7c0-4a09090389cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.078615] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.078817] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 
tempest-ServerDiskConfigTestJSON-919905860-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1198.082729] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326f3cf1-3e6a-476b-852c-c4a0137b784c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.085949] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1198.085949] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]527d7133-6814-37cf-cf86-2f10916c8297" [ 1198.085949] env[68638]: _type = "Task" [ 1198.085949] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.096468] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d7133-6814-37cf-cf86-2f10916c8297, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.225678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.282382] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e2e6428-9634-4230-89b2-87196909da55 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.298083] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac84335c-7803-4f6d-9147-9e8ce2750976 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.329552] env[68638]: DEBUG nova.compute.manager [req-586665c7-f995-4b45-9b75-ed3e0835de32 req-9ceb49b5-f7bd-452d-9984-1a86db338682 service nova] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Detach interface failed, port_id=53ebdba3-fcaa-435f-a048-dd22fa9cc3b5, reason: Instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1198.488145] env[68638]: DEBUG nova.compute.manager [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-vif-unplugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1198.488384] env[68638]: DEBUG oslo_concurrency.lockutils [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.488664] env[68638]: DEBUG oslo_concurrency.lockutils [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.488854] env[68638]: DEBUG oslo_concurrency.lockutils [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.489159] env[68638]: DEBUG nova.compute.manager [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] No waiting events found dispatching network-vif-unplugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.490337] env[68638]: WARNING nova.compute.manager [req-873cbd8a-2707-41f9-9647-1cbfa88e04de req-7a8c4930-fade-483e-b8d1-dcfa95ac3dca service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received unexpected event network-vif-unplugged-3199e8a3-335c-43ff-be19-3881b85a0203 for instance with vm_state shelved and task_state shelving_offloading. [ 1198.544699] env[68638]: INFO nova.compute.manager [-] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Took 1.31 seconds to deallocate network for instance. 
[ 1198.577514] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1198.578456] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0f1a1a-2172-4630-8999-869f059a8455 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.588214] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.591194] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22468bc4-6b16-4f0f-a745-0a6efdadd58b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.598249] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]527d7133-6814-37cf-cf86-2f10916c8297, 'name': SearchDatastore_Task, 'duration_secs': 0.011021} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.599114] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8768868e-640e-458a-85c9-089da55a7aeb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.603868] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1198.603868] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52bdf6e6-7ba4-7290-5381-8a9819d9e3c0" [ 1198.603868] env[68638]: _type = "Task" [ 1198.603868] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.613011] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bdf6e6-7ba4-7290-5381-8a9819d9e3c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.657797] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1198.657962] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1198.658160] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.658429] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-926403c2-f1c7-48fa-98cd-01b4d0587903 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.665988] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1198.665988] env[68638]: value = "task-2834560" [ 1198.665988] env[68638]: _type = "Task" [ 1198.665988] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.674051] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834560, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.850291] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.212s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.054541] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.054858] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.055148] env[68638]: DEBUG nova.objects.instance [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lazy-loading 'resources' on Instance uuid df2e066d-7c71-4aec-ab9b-a339a7ff21fb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.115249] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52bdf6e6-7ba4-7290-5381-8a9819d9e3c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009873} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.115567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.115845] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d7fd30c6-3e0b-4564-9369-f29dc59a4d74/d7fd30c6-3e0b-4564-9369-f29dc59a4d74.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1199.116127] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-266605e9-6136-4053-aa1e-f5aa271a4cf5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.123825] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1199.123825] env[68638]: value = "task-2834561" [ 1199.123825] env[68638]: _type = "Task" [ 1199.123825] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.134861] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.179021] env[68638]: DEBUG oslo_vmware.api [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133147} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.179021] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.179021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1199.179021] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1199.209716] env[68638]: INFO nova.scheduler.client.report [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted allocations for instance 25c35c36-71c9-48cd-b7e4-6293eef890e5 [ 1199.415505] env[68638]: INFO nova.scheduler.client.report [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocation for migration 1ae08ae0-f8a3-4c61-96fa-4b16d7ca8f4d [ 1199.633878] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834561, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486098} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.636056] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] d7fd30c6-3e0b-4564-9369-f29dc59a4d74/d7fd30c6-3e0b-4564-9369-f29dc59a4d74.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1199.636447] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1199.636739] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ff8394b3-b65a-4a6e-b9c3-06747d1a420f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.643137] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1199.643137] env[68638]: value = "task-2834562" [ 1199.643137] env[68638]: _type = "Task" [ 1199.643137] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.654964] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834562, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.689954] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd43040-7ee3-4dfe-b375-7f2966214b46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.698715] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7dd9062-785a-4db5-9a09-1a1054c5fe0c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.733685] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.735064] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4033428-c7af-42c3-8d00-4df87430562d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.743171] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e93406-7c59-4033-b3cb-fbe4024e1e39 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.758425] env[68638]: DEBUG nova.compute.provider_tree [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.781848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "2452dd7a-5f16-4094-9407-59405eed572b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.783286] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.921318] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.925s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.153879] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 
tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834562, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069623} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.154172] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1200.154965] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831dbe88-a7d3-4670-a172-cd4a62747cf3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.176300] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] d7fd30c6-3e0b-4564-9369-f29dc59a4d74/d7fd30c6-3e0b-4564-9369-f29dc59a4d74.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1200.176609] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-231befc8-3aa9-41ba-a5f9-610e2deb3663 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.198486] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1200.198486] env[68638]: value = "task-2834563" [ 1200.198486] env[68638]: _type = "Task" [ 1200.198486] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.206068] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834563, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.262476] env[68638]: DEBUG nova.scheduler.client.report [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.284807] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1200.615827] env[68638]: DEBUG nova.compute.manager [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1200.616149] env[68638]: DEBUG nova.compute.manager [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing instance network info cache due to event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1200.616449] env[68638]: DEBUG oslo_concurrency.lockutils [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.616666] env[68638]: DEBUG oslo_concurrency.lockutils [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.616867] env[68638]: DEBUG nova.network.neutron [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.715166] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834563, 'name': ReconfigVM_Task, 'duration_secs': 0.356201} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.715348] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Reconfigured VM instance instance-00000076 to attach disk [datastore1] d7fd30c6-3e0b-4564-9369-f29dc59a4d74/d7fd30c6-3e0b-4564-9369-f29dc59a4d74.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.716199] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01a8fa80-01cd-4b51-a39b-d01700b5623a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.728019] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1200.728019] env[68638]: value = "task-2834564" [ 1200.728019] env[68638]: _type = "Task" [ 1200.728019] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.735795] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834564, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.767423] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.772350] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.037s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.772350] env[68638]: DEBUG nova.objects.instance [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'resources' on Instance uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.802084] env[68638]: INFO nova.scheduler.client.report [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Deleted allocations for instance df2e066d-7c71-4aec-ab9b-a339a7ff21fb [ 1200.810713] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.030629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.030910] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.031146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.031337] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.031507] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.033741] env[68638]: INFO nova.compute.manager [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Terminating instance [ 1201.209146] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.209427] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.209642] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.209846] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.210027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.212133] env[68638]: INFO nova.compute.manager [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Terminating instance [ 1201.236412] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834564, 'name': Rename_Task, 'duration_secs': 0.139604} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.236706] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1201.237062] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9c511d2-00c7-4280-859d-0a724bcb2c4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.245925] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1201.245925] env[68638]: value = "task-2834565" [ 1201.245925] env[68638]: _type = "Task" [ 1201.245925] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.266238] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834565, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.278293] env[68638]: DEBUG nova.objects.instance [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'numa_topology' on Instance uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.313128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9cceb08b-44e8-43b2-b891-047f1ab604bf tempest-ServerRescueNegativeTestJSON-1813447117 tempest-ServerRescueNegativeTestJSON-1813447117-project-member] Lock "df2e066d-7c71-4aec-ab9b-a339a7ff21fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.715s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.333869] env[68638]: DEBUG nova.network.neutron [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updated VIF entry in instance network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.334231] env[68638]: DEBUG nova.network.neutron [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap3199e8a3-33", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.542267] env[68638]: DEBUG nova.compute.manager [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1201.542267] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.542267] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f140b75-146d-4b31-a1a7-a6fa1e13eb89 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.551574] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.551845] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-026dd671-f7ac-4451-aa74-e342eedbf4f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.559438] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1201.559438] env[68638]: value = "task-2834566" [ 1201.559438] env[68638]: _type = "Task" [ 1201.559438] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.569693] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834566, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.715904] env[68638]: DEBUG nova.compute.manager [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1201.716267] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1201.717153] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f91713-d4fa-406a-90bb-9496242ecd1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.724704] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.724968] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-903ad3a6-d9bc-42b2-a46d-eb40389162fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.730852] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1201.730852] env[68638]: value = "task-2834567" [ 1201.730852] env[68638]: _type = "Task" [ 1201.730852] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.738412] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.754490] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834565, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.779215] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.781917] env[68638]: DEBUG nova.objects.base [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Object Instance<25c35c36-71c9-48cd-b7e4-6293eef890e5> lazy-loaded attributes: resources,numa_topology {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1201.837276] env[68638]: DEBUG oslo_concurrency.lockutils [req-d10e9fbf-1367-4375-b040-d274d54ed948 req-919d2842-120f-464d-a592-fa32f0351f06 service nova] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.924734] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3379b24f-4c45-4169-8c5d-56c948a4386c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.935564] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f302710c-8510-4c61-a79e-6f436ad3de9b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.972408] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7226e0aa-26b9-4d49-a08a-aa674cbd517a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.981217] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2dee7a-d414-4653-8236-171073007658 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.997496] env[68638]: DEBUG nova.compute.provider_tree [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.070416] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834566, 'name': PowerOffVM_Task, 'duration_secs': 0.193177} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.070736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1202.070972] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1202.071472] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04b2f18d-16ad-4173-9915-c26ac4e4887e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.132370] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1202.132675] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1202.132874] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleting the datastore file [datastore2] 2e788c4c-f6d1-4001-9389-1068887d205f {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1202.133377] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d52b90f-a2bd-4ae2-8a8a-6357403cafb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.141413] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for the task: (returnval){ [ 1202.141413] env[68638]: value = "task-2834569" [ 1202.141413] env[68638]: _type = "Task" [ 1202.141413] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.153618] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834569, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.241927] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834567, 'name': PowerOffVM_Task, 'duration_secs': 0.340664} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.242205] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1202.242341] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1202.242832] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1506db9-cc5b-405d-a673-c95d136f9920 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.256910] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834565, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.321239] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1202.321502] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1202.321645] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleting the datastore file [datastore2] a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1202.321980] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f138e3cc-ea44-4c66-a1a7-be26c83b0805 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.328756] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for the task: (returnval){ [ 1202.328756] env[68638]: value = "task-2834571" [ 1202.328756] env[68638]: _type = "Task" [ 
1202.328756] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.341056] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834571, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.502636] env[68638]: DEBUG nova.scheduler.client.report [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.652283] env[68638]: DEBUG oslo_vmware.api [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Task: {'id': task-2834569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150315} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.652703] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.653072] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.653372] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.653675] env[68638]: INFO nova.compute.manager [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1202.654056] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.654361] env[68638]: DEBUG nova.compute.manager [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1202.654558] env[68638]: DEBUG nova.network.neutron [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1202.762283] env[68638]: DEBUG oslo_vmware.api [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834565, 'name': PowerOnVM_Task, 'duration_secs': 1.073845} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.762283] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1202.762283] env[68638]: INFO nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1202.762283] env[68638]: DEBUG nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.762797] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de0be8f-9cd2-4182-9fcf-d3d72ea4bde1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.838813] env[68638]: DEBUG oslo_vmware.api [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Task: {'id': task-2834571, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14544} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.839368] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1202.839368] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1202.839720] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1202.840319] env[68638]: INFO nova.compute.manager [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1202.840319] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.840438] env[68638]: DEBUG nova.compute.manager [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1202.840489] env[68638]: DEBUG nova.network.neutron [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1203.009836] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.013217] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.202s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.015916] env[68638]: INFO nova.compute.claims [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.176566] env[68638]: DEBUG nova.compute.manager [req-1e5fe377-4778-42a0-991c-b6ed614f73ce req-60bc4c19-242a-4179-8eea-6576fbd17995 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Received event network-vif-deleted-c9f8dd22-b056-4864-91c0-671a170e81bd {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1203.176670] env[68638]: INFO nova.compute.manager [req-1e5fe377-4778-42a0-991c-b6ed614f73ce req-60bc4c19-242a-4179-8eea-6576fbd17995 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Neutron deleted interface c9f8dd22-b056-4864-91c0-671a170e81bd; detaching it from the instance and deleting it from the info cache [ 1203.176867] env[68638]: DEBUG nova.network.neutron [req-1e5fe377-4778-42a0-991c-b6ed614f73ce req-60bc4c19-242a-4179-8eea-6576fbd17995 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.284631] env[68638]: INFO nova.compute.manager [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Took 15.91 seconds to build instance. 
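[Aside] The spawn sequence traced above (CopyVirtualDisk, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows oslo.vmware's invoke-then-poll pattern: each vCenter call returns a Task managed object, and wait_for_task() polls it until vCenter reports success, which is what produces the repeated "progress is N%" lines. A minimal sketch of that pattern is below; the vCenter host, credentials, VMDK path and datacenter reference are placeholders, not values from this deployment, and only the oslo_vmware names are taken as given.

    # Sketch of the invoke-then-poll pattern seen in the log above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',      # placeholder endpoint
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore1] example/example.vmdk',      # hypothetical VMDK path
        datacenter=None,                               # datacenter ref would go here
        newCapacityKb=1048576, eagerZero=False)

    # wait_for_task() polls the Task object (the "progress is N%" lines above)
    # and returns once vCenter marks the task successful.
    session.wait_for_task(task)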
[ 1203.321780] env[68638]: DEBUG nova.compute.manager [req-aedad75d-768f-4377-9904-57391e68d5b4 req-f06a7a59-865d-48ab-9311-05c9cf9fd704 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Received event network-vif-deleted-2725817f-dd0e-4f09-ba4d-70f48e578f8c {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1203.321992] env[68638]: INFO nova.compute.manager [req-aedad75d-768f-4377-9904-57391e68d5b4 req-f06a7a59-865d-48ab-9311-05c9cf9fd704 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Neutron deleted interface 2725817f-dd0e-4f09-ba4d-70f48e578f8c; detaching it from the instance and deleting it from the info cache [ 1203.322916] env[68638]: DEBUG nova.network.neutron [req-aedad75d-768f-4377-9904-57391e68d5b4 req-f06a7a59-865d-48ab-9311-05c9cf9fd704 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.510728] env[68638]: DEBUG nova.network.neutron [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.532730] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e9c0d969-04d0-4bcc-8630-490671b2a891 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.234s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.533959] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.755s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.535025] env[68638]: INFO nova.compute.manager [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Unshelving [ 1203.634703] env[68638]: DEBUG nova.network.neutron [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.683437] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c524f8c-0e69-4c0f-9e7d-19d2ffe8e7fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.694023] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0daa89b4-3049-4439-8d3e-74c6324fa2fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.720980] env[68638]: DEBUG nova.compute.manager [req-1e5fe377-4778-42a0-991c-b6ed614f73ce req-60bc4c19-242a-4179-8eea-6576fbd17995 service nova] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Detach interface failed, port_id=c9f8dd22-b056-4864-91c0-671a170e81bd, 
reason: Instance 2e788c4c-f6d1-4001-9389-1068887d205f could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1203.788435] env[68638]: DEBUG oslo_concurrency.lockutils [None req-978bb56b-59e0-46d6-860f-9d9faba42e7d tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.424s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.828827] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b56f833-e368-47c5-be42-c86a2676887d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.841450] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da433468-a9d1-4d73-a001-917d24453222 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.885870] env[68638]: DEBUG nova.compute.manager [req-aedad75d-768f-4377-9904-57391e68d5b4 req-f06a7a59-865d-48ab-9311-05c9cf9fd704 service nova] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Detach interface failed, port_id=2725817f-dd0e-4f09-ba4d-70f48e578f8c, reason: Instance a576ba6f-5e3b-4408-b95d-2084a072ec12 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1203.930667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "4db12faa-4c35-42ae-add5-19372e1d8807" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.930667] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.014217] env[68638]: INFO nova.compute.manager [-] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Took 1.36 seconds to deallocate network for instance. [ 1204.140104] env[68638]: INFO nova.compute.manager [-] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Took 1.30 seconds to deallocate network for instance. 
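[Aside] The recurring "Acquiring lock … by …", "… acquired … waited Ns" and "… released … held Ns" lines throughout this stretch are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work per instance UUID and around the resource tracker's "compute_resources" lock. A small illustration of the same primitive follows; the lock names and functions are made up for the example.

    from oslo_concurrency import lockutils

    # Decorator form: serializes concurrent callers on the named lock and emits
    # the acquire/release DEBUG lines with waited/held timings seen in this log.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section

    # Context-manager form, e.g. keyed on an instance UUID.
    def do_terminate_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            pass  # critical section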
[ 1204.161121] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0444002b-2012-4084-9bc1-cc7991f264bc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.174923] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512a3076-e645-4049-8045-9407eeef1594 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.206266] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca904981-603d-4bb3-9c74-490564728c76 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.213924] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8e3588-7ba1-47d1-b165-6fb5bfc5d5da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.227634] env[68638]: DEBUG nova.compute.provider_tree [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.432761] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1204.521299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.556887] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.631272] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.631512] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.631729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.632060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.632107] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.635485] env[68638]: INFO nova.compute.manager [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Terminating instance [ 1204.648341] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.733023] env[68638]: DEBUG nova.scheduler.client.report [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.955354] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.140713] env[68638]: DEBUG nova.compute.manager [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] 
[instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1205.140953] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1205.141913] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbac8eed-132d-43e9-8850-1526f310570f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.151365] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1205.151685] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99c4c28e-4f92-4368-aac4-954a176448c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.159099] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1205.159099] env[68638]: value = "task-2834572" [ 1205.159099] env[68638]: _type = "Task" [ 1205.159099] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.167854] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.236447] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.237082] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1205.241581] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.720s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.241868] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.245312] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.688s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.245985] env[68638]: DEBUG nova.objects.instance [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'pci_requests' on Instance uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.269945] env[68638]: INFO nova.scheduler.client.report [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Deleted allocations for instance 2e788c4c-f6d1-4001-9389-1068887d205f [ 1205.671020] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834572, 'name': PowerOffVM_Task, 'duration_secs': 0.277778} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.671360] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1205.671536] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1205.671802] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2206404-e7d8-4508-98b0-369ff1c81deb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.737435] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1205.737725] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1205.737927] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleting the datastore file [datastore1] d7fd30c6-3e0b-4564-9369-f29dc59a4d74 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.738353] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c775b50-6c4a-4060-82f9-50ea90261a47 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.744806] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for the task: (returnval){ [ 1205.744806] env[68638]: value = "task-2834574" [ 1205.744806] env[68638]: _type = "Task" [ 1205.744806] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.749459] env[68638]: DEBUG nova.compute.utils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1205.751901] env[68638]: DEBUG nova.objects.instance [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'numa_topology' on Instance uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.752842] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1205.753015] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1205.761282] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834574, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.779299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-73bddc21-31ff-4aa5-9c2c-f4be18e8f008 tempest-DeleteServersTestJSON-1789780743 tempest-DeleteServersTestJSON-1789780743-project-member] Lock "2e788c4c-f6d1-4001-9389-1068887d205f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.748s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.802818] env[68638]: DEBUG nova.policy [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fb22fd94276463ebb001ec679a36fec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c238a05699ee42f9a3d69c16f0777ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1206.140983] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Successfully created port: 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1206.256646] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1206.259526] env[68638]: INFO nova.compute.claims [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1206.261951] env[68638]: DEBUG oslo_vmware.api [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Task: {'id': task-2834574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236754} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.262264] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1206.262446] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1206.262621] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1206.262790] env[68638]: INFO nova.compute.manager [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1206.263040] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1206.263236] env[68638]: DEBUG nova.compute.manager [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1206.263330] env[68638]: DEBUG nova.network.neutron [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1206.500955] env[68638]: DEBUG nova.compute.manager [req-2a747190-9316-4b44-b535-942776b52e3d req-01129475-5610-4b68-8b7a-33363bb91234 service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Received event network-vif-deleted-dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1206.501094] env[68638]: INFO nova.compute.manager [req-2a747190-9316-4b44-b535-942776b52e3d req-01129475-5610-4b68-8b7a-33363bb91234 service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Neutron deleted interface dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6; detaching it from the instance and deleting it from the info cache [ 1206.501267] env[68638]: DEBUG nova.network.neutron [req-2a747190-9316-4b44-b535-942776b52e3d req-01129475-5610-4b68-8b7a-33363bb91234 service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.986149] env[68638]: DEBUG nova.network.neutron [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.003879] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79734843-f30c-47ac-b2bb-d3633dfb47c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.014186] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55163b86-15d8-44ef-a670-842aab7fd809 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.041786] env[68638]: DEBUG nova.compute.manager [req-2a747190-9316-4b44-b535-942776b52e3d req-01129475-5610-4b68-8b7a-33363bb91234 service nova] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Detach interface failed, port_id=dfb906bd-6a6e-40d8-aea5-e9c4415dd8e6, reason: Instance d7fd30c6-3e0b-4564-9369-f29dc59a4d74 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1207.269543] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1207.295515] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1207.295776] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.295935] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1207.296143] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.296297] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1207.296443] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1207.296651] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1207.296811] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1207.296980] env[68638]: DEBUG nova.virt.hardware [None 
req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1207.297157] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1207.297329] env[68638]: DEBUG nova.virt.hardware [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1207.298307] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17fcfc9-0c6a-4684-bd4f-0bc153a8aef6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.308028] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e4db15-edd6-43b4-8aa9-0bb12b591c8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.380154] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c91292-74ba-4ab3-962e-e1b6bd786248 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.387972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fb375b-6795-484a-b143-4209394e5a58 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.419062] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a729529-cab8-4853-8221-277e14b6088a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.428251] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de43bcd2-3dd0-4ead-902e-4d5ee87a8083 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.439646] env[68638]: DEBUG nova.compute.provider_tree [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.490994] env[68638]: INFO nova.compute.manager [-] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Took 1.23 seconds to deallocate network for instance. 
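The scheduler report entries in this section repeatedly log the same placement inventory payload for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff. As a minimal illustrative sketch (not Nova's own code), the Python below takes the capacity-relevant fields from that payload and derives usable capacity per resource class with the usual placement formula (total - reserved) * allocation_ratio; the helper name is a hypothetical choice for illustration.

    # Capacity-relevant fields of the inventory reported in this log for
    # provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff (min_unit/max_unit/step_size omitted).
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def usable_capacity(inventory):
        # Effective schedulable capacity per resource class:
        # (total - reserved) * allocation_ratio.
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    print(usable_capacity(INVENTORY))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}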
[ 1207.534389] env[68638]: DEBUG nova.compute.manager [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Received event network-vif-plugged-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1207.534705] env[68638]: DEBUG oslo_concurrency.lockutils [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] Acquiring lock "2452dd7a-5f16-4094-9407-59405eed572b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.535029] env[68638]: DEBUG oslo_concurrency.lockutils [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] Lock "2452dd7a-5f16-4094-9407-59405eed572b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.535239] env[68638]: DEBUG oslo_concurrency.lockutils [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] Lock "2452dd7a-5f16-4094-9407-59405eed572b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.535528] env[68638]: DEBUG nova.compute.manager [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] No waiting events found dispatching network-vif-plugged-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1207.535598] env[68638]: WARNING nova.compute.manager [req-744a865d-b7fe-4d01-9d4d-8caa33f8a85a req-2a8179c8-c27b-4392-80d1-f2d84159c3c2 service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Received unexpected event network-vif-plugged-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 for instance with vm_state building and task_state spawning. 
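The lock lines in this section ("Acquiring lock ... by ...", "acquired ... :: waited Ns", "released ... :: held Ns") are emitted by oslo.concurrency's lockutils, which Nova uses here to serialize resource-tracker updates ("compute_resources"), per-instance event handling ("<uuid>-events"), and network info cache refreshes ("refresh_cache-<uuid>"). A minimal sketch of the two usage patterns follows; the function body and lock names are illustrative only, not Nova's actual code, and the waited/held durations in the log correspond to time spent blocking on and then holding such locks.

    from oslo_concurrency import lockutils

    # Decorator form: serializes every call to the wrapped function on one named lock.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section

    # Context-manager form, e.g. guarding a per-instance cache refresh.
    with lockutils.lock('refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b'):
        pass  # critical section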
[ 1207.628217] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Successfully updated port: 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.943363] env[68638]: DEBUG nova.scheduler.client.report [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.997844] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.130792] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.130962] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.131081] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1208.448232] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.203s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.450500] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.802s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1208.450738] env[68638]: DEBUG nova.objects.instance [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lazy-loading 'resources' on Instance uuid a576ba6f-5e3b-4408-b95d-2084a072ec12 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.476400] env[68638]: INFO nova.network.neutron [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating port 3199e8a3-335c-43ff-be19-3881b85a0203 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1208.663103] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1208.792664] env[68638]: DEBUG nova.network.neutron [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updating instance_info_cache with network_info: [{"id": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "address": "fa:16:3e:6c:d6:2d", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c25dd2c-4a", "ovs_interfaceid": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.055862] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c9500e-6918-460e-91d2-97fe97770239 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.064012] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2fe51a-428e-44e8-9f6b-e52298163a11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.095203] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7498fc3-aab8-47de-964c-67ce2ccc570b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.102831] env[68638]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4923795-e158-4309-8bbe-dc6bef68a1b7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.116154] env[68638]: DEBUG nova.compute.provider_tree [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.296749] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.297245] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance network_info: |[{"id": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "address": "fa:16:3e:6c:d6:2d", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c25dd2c-4a", "ovs_interfaceid": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1209.297659] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:d6:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.306052] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1209.306288] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.306515] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28fb8cc4-9504-487a-a98a-90f3fcc87181 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.327511] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.327511] env[68638]: value = "task-2834576" [ 1209.327511] env[68638]: _type = "Task" [ 1209.327511] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.335062] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834576, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.578704] env[68638]: DEBUG nova.compute.manager [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Received event network-changed-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1209.579013] env[68638]: DEBUG nova.compute.manager [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Refreshing instance network info cache due to event network-changed-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1209.579143] env[68638]: DEBUG oslo_concurrency.lockutils [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] Acquiring lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.579287] env[68638]: DEBUG oslo_concurrency.lockutils [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] Acquired lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.579448] env[68638]: DEBUG nova.network.neutron [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Refreshing network info cache for port 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.619190] env[68638]: DEBUG nova.scheduler.client.report [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1209.838292] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834576, 'name': CreateVM_Task, 'duration_secs': 0.291756} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.838537] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1209.839782] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.839899] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.840195] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1209.840464] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a3bde3-b3a3-4920-a6e8-80efcbccd80f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.845868] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1209.845868] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5203c628-9181-3897-70cb-e853f3358f86" [ 1209.845868] env[68638]: _type = "Task" [ 1209.845868] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.854794] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5203c628-9181-3897-70cb-e853f3358f86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.123591] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.126242] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.171s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.127901] env[68638]: INFO nova.compute.claims [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1210.165515] env[68638]: INFO nova.scheduler.client.report [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Deleted allocations for instance a576ba6f-5e3b-4408-b95d-2084a072ec12 [ 1210.196365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.196365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.196563] env[68638]: DEBUG nova.network.neutron [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1210.314141] env[68638]: DEBUG nova.network.neutron [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updated VIF entry in instance network info cache for port 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.314627] env[68638]: DEBUG nova.network.neutron [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updating instance_info_cache with network_info: [{"id": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "address": "fa:16:3e:6c:d6:2d", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c25dd2c-4a", "ovs_interfaceid": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.358243] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5203c628-9181-3897-70cb-e853f3358f86, 'name': SearchDatastore_Task, 'duration_secs': 0.010558} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.358545] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.358782] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.359014] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.359180] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.359474] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.360173] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cf112ea-1d5e-447e-86a2-aa058338f66d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.368377] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.369061] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.369426] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d1a412-c9e7-434f-96cf-5be02ea4a806 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.374589] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1210.374589] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52016519-81f8-bd5d-a31e-ac9c1b0c2dcc" [ 1210.374589] env[68638]: _type = "Task" [ 1210.374589] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.384480] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52016519-81f8-bd5d-a31e-ac9c1b0c2dcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.673850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-1a0fe7f6-fe7d-4631-9893-afc1f77861d6 tempest-ServersNegativeTestJSON-75032751 tempest-ServersNegativeTestJSON-75032751-project-member] Lock "a576ba6f-5e3b-4408-b95d-2084a072ec12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.464s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.819609] env[68638]: DEBUG oslo_concurrency.lockutils [req-24e2a243-6d7e-424f-85bd-4666582ebc54 req-433dc3a9-ed29-4a52-9bfa-064fcf82addf service nova] Releasing lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.885805] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52016519-81f8-bd5d-a31e-ac9c1b0c2dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.009649} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.886077] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578139b9-fcf1-468c-837f-6605d25c007b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.892875] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1210.892875] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5254f876-cfe5-d24c-f08e-67b1ca2eb4d6" [ 1210.892875] env[68638]: _type = "Task" [ 1210.892875] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.901049] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254f876-cfe5-d24c-f08e-67b1ca2eb4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.942165] env[68638]: DEBUG nova.network.neutron [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.237831] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba1761a-b04a-4cd1-9d89-a64e37bd4440 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.245801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732e1ce2-3e54-4467-8876-11f8b1b8523e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.278744] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd7629f-e46c-462c-a0e2-953d161ff1e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.286260] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815cc786-f908-4e2b-92e1-7ee909b360fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.299168] env[68638]: DEBUG nova.compute.provider_tree [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Inventory has not changed in ProviderTree for 
provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1211.402114] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5254f876-cfe5-d24c-f08e-67b1ca2eb4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009276} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.402401] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.402657] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1211.403175] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e5e0c36-1ba9-4aad-9297-6630a399350a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.409444] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1211.409444] env[68638]: value = "task-2834577" [ 1211.409444] env[68638]: _type = "Task" [ 1211.409444] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.416736] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834577, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.444864] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.471851] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='cd447bd5b8293eece4e4f80c2159cf9b',container_format='bare',created_at=2025-03-07T02:38:10Z,direct_url=,disk_format='vmdk',id=629cabe9-6521-40cd-a0d0-8bc57ab4999b,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-394988180-shelved',owner='ee5d59c43e974d04ba56981f2716ff60',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-03-07T02:38:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1211.472190] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1211.472357] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1211.472540] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1211.472680] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1211.472898] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1211.473049] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1211.473219] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1211.473388] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1211.473550] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1211.473722] env[68638]: DEBUG nova.virt.hardware [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1211.474580] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342b2a8c-a884-47a9-a930-c272d8c2f08a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.483595] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98afb792-88f1-49e6-9039-975f063bbb02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.496849] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:0e:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3199e8a3-335c-43ff-be19-3881b85a0203', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1211.504224] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.504466] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1211.504681] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2943e388-d010-4f4e-ae8f-123e84d2ced7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.524206] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1211.524206] env[68638]: value = "task-2834578" [ 1211.524206] env[68638]: _type = "Task" [ 1211.524206] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.532254] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834578, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.623956] env[68638]: DEBUG nova.compute.manager [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1211.624219] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.624435] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.624603] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.624773] env[68638]: DEBUG nova.compute.manager [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] No waiting events found dispatching network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1211.624938] env[68638]: WARNING nova.compute.manager [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received unexpected event network-vif-plugged-3199e8a3-335c-43ff-be19-3881b85a0203 for instance with vm_state shelved_offloaded and task_state spawning. 
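The lock lines woven through this section ("Acquiring lock …", "… acquired by … :: waited 0.000s", '… "released" by … :: held 0.000s'), including the instance-events lock taken just above for pop_instance_event, are emitted by oslo.concurrency's lockutils wrappers rather than by Nova's own code. Below is a minimal sketch of the two usage patterns that produce them; the lock names and function bodies are placeholders, not the actual Nova implementation.

```python
# Minimal sketch (not Nova code): the two oslo.concurrency patterns whose
# logging appears throughout this log. Lock names are placeholders.
from oslo_concurrency import lockutils


@lockutils.synchronized('25c35c36-71c9-48cd-b7e4-6293eef890e5-events')
def _pop_event():
    # The decorator's wrapper logs the 'acquired ... :: waited' /
    # '"released" ... :: held' timing lines around this body.
    pass


def refresh_cache(instance_uuid):
    # The context-manager form logs the plain "Acquiring"/"Acquired"/
    # "Releasing lock" lines seen around the refresh_cache-<uuid> locks.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache while holding the lock


_pop_event()
refresh_cache('25c35c36-71c9-48cd-b7e4-6293eef890e5')
```

Both forms default to an in-process semaphore; passing external=True switches to file-based inter-process locking.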
[ 1211.625253] env[68638]: DEBUG nova.compute.manager [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1211.625502] env[68638]: DEBUG nova.compute.manager [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing instance network info cache due to event network-changed-3199e8a3-335c-43ff-be19-3881b85a0203. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1211.625710] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Acquiring lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.625852] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Acquired lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.626018] env[68638]: DEBUG nova.network.neutron [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Refreshing network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.803853] env[68638]: DEBUG nova.scheduler.client.report [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1211.922409] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834577, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.035841] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834578, 'name': CreateVM_Task, 'duration_secs': 0.45397} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.035841] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1212.036159] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.036377] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.036781] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1212.037114] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-002b210a-8fec-45a2-94f0-34315cce5f90 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.041699] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1212.041699] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ce94b7-31f6-a723-136d-6a7ca2e02cf7" [ 1212.041699] env[68638]: _type = "Task" [ 1212.041699] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.051714] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ce94b7-31f6-a723-136d-6a7ca2e02cf7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.309106] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.183s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.309621] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1212.317021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.316s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.317021] env[68638]: DEBUG nova.objects.instance [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lazy-loading 'resources' on Instance uuid d7fd30c6-3e0b-4564-9369-f29dc59a4d74 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.387495] env[68638]: DEBUG nova.network.neutron [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updated VIF entry in instance network info cache for port 3199e8a3-335c-43ff-be19-3881b85a0203. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.387956] env[68638]: DEBUG nova.network.neutron [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [{"id": "3199e8a3-335c-43ff-be19-3881b85a0203", "address": "fa:16:3e:e5:0e:d2", "network": {"id": "4723bf0f-71b1-4997-b6ce-06f7dbda84df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-488516254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ee5d59c43e974d04ba56981f2716ff60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3199e8a3-33", "ovs_interfaceid": "3199e8a3-335c-43ff-be19-3881b85a0203", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.420287] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834577, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54067} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.420568] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1212.420786] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1212.421052] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f944ed9d-72f4-4dca-90df-36b3e9d6a1e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.427261] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1212.427261] env[68638]: value = "task-2834579" [ 1212.427261] env[68638]: _type = "Task" [ 1212.427261] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.435407] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834579, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.554182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.554499] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Processing image 629cabe9-6521-40cd-a0d0-8bc57ab4999b {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1212.554777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.554971] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.555206] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1212.555496] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfecc708-07f1-42f1-aa5b-8c14c7c68182 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.564813] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1212.564881] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1212.567156] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f37fe009-e081-42ee-83d1-449eb9e7a2e4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.572884] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1212.572884] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5249ee5d-0b61-48e5-1c9d-e0659df8ac2e" [ 1212.572884] env[68638]: _type = "Task" [ 1212.572884] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.580718] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5249ee5d-0b61-48e5-1c9d-e0659df8ac2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.816726] env[68638]: DEBUG nova.compute.utils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1212.818268] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1212.818439] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1212.869708] env[68638]: DEBUG nova.policy [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c682afd2c9c848ee923e798a78164294', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7ed0f3027384a4c8f3cf2e4ed0c6b16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1212.892958] env[68638]: DEBUG oslo_concurrency.lockutils [req-189a7683-6a23-4774-bb05-36e544762108 req-70360403-e4ec-44b0-a1cc-71e0330f1976 service nova] Releasing lock "refresh_cache-25c35c36-71c9-48cd-b7e4-6293eef890e5" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.925469] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a868c58-7c50-45c0-b3b3-5490ad29b262 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.938256] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2590e631-1936-44ec-9fe7-2fea81e3827c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.941220] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834579, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068294} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.941480] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1212.942550] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2402d5b4-70ef-4b36-bc3f-213199b12976 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.970769] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86969f88-cf29-4721-9f25-5abf5f9fb9e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.990623] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1212.991232] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45b20eda-d626-4ac3-850a-2d8b2b51128c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.011085] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211992c1-53f4-4c1d-a893-af70388ef1f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.013802] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1213.013802] env[68638]: value = "task-2834580" [ 1213.013802] env[68638]: _type = "Task" [ 1213.013802] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.023979] env[68638]: DEBUG nova.compute.provider_tree [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.029646] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834580, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.082913] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1213.083201] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Fetch image to [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0/OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1213.083415] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Downloading stream optimized image 629cabe9-6521-40cd-a0d0-8bc57ab4999b to [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0/OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0.vmdk on the data store datastore1 as vApp {{(pid=68638) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1213.083599] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Downloading image file data 629cabe9-6521-40cd-a0d0-8bc57ab4999b to the ESX as VM named 'OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0' {{(pid=68638) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1213.139439] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Successfully created port: 2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1213.168331] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1213.168331] env[68638]: value = "resgroup-9" [ 1213.168331] env[68638]: _type = "ResourcePool" [ 1213.168331] env[68638]: }. 
{{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1213.168644] env[68638]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a4f4e7a1-9f42-476f-9b34-d81aae3569c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.190941] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease: (returnval){ [ 1213.190941] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5266241e-e4e0-eb1c-7fb3-f47554df624e" [ 1213.190941] env[68638]: _type = "HttpNfcLease" [ 1213.190941] env[68638]: } obtained for vApp import into resource pool (val){ [ 1213.190941] env[68638]: value = "resgroup-9" [ 1213.190941] env[68638]: _type = "ResourcePool" [ 1213.190941] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1213.191555] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the lease: (returnval){ [ 1213.191555] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5266241e-e4e0-eb1c-7fb3-f47554df624e" [ 1213.191555] env[68638]: _type = "HttpNfcLease" [ 1213.191555] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1213.197556] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1213.197556] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5266241e-e4e0-eb1c-7fb3-f47554df624e" [ 1213.197556] env[68638]: _type = "HttpNfcLease" [ 1213.197556] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1213.324056] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1213.524496] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834580, 'name': ReconfigVM_Task, 'duration_secs': 0.287648} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.524852] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1213.525716] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea000797-ca92-4cba-9e2f-838722f66317 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.528280] env[68638]: DEBUG nova.scheduler.client.report [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.537541] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1213.537541] env[68638]: value = "task-2834582" [ 1213.537541] env[68638]: _type = "Task" [ 1213.537541] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.546821] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834582, 'name': Rename_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.700364] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1213.700364] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5266241e-e4e0-eb1c-7fb3-f47554df624e" [ 1213.700364] env[68638]: _type = "HttpNfcLease" [ 1213.700364] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1213.700634] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1213.700634] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5266241e-e4e0-eb1c-7fb3-f47554df624e" [ 1213.700634] env[68638]: _type = "HttpNfcLease" [ 1213.700634] env[68638]: }. 
{{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1213.701373] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d163b571-e61a-4ca4-8c06-4a04d49e66ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.708224] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1213.708404] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1213.771467] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-482dbd87-5680-47b3-b95d-4cabb3783e2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.033885] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.720s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.047056] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834582, 'name': Rename_Task, 'duration_secs': 0.131457} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.047347] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1214.047592] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60b4c689-d17a-44b4-9351-f8dde218e0fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.053775] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1214.053775] env[68638]: value = "task-2834583" [ 1214.053775] env[68638]: _type = "Task" [ 1214.053775] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.054690] env[68638]: INFO nova.scheduler.client.report [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Deleted allocations for instance d7fd30c6-3e0b-4564-9369-f29dc59a4d74 [ 1214.065858] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834583, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.333688] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Start spawning the instance on the hypervisor. {{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1214.362978] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.363264] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.363502] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.363714] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.363879] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.364051] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 
tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.364369] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.365011] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.365011] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.365011] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.365239] env[68638]: DEBUG nova.virt.hardware [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.366730] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd00ed2-3de3-4eec-a930-d3f1cc93f3ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.380638] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7832d2-ad7d-4e4e-bce9-4dd31f46ba87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.523754] env[68638]: DEBUG nova.compute.manager [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Received event network-vif-plugged-2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1214.524098] env[68638]: DEBUG oslo_concurrency.lockutils [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] Acquiring lock "4db12faa-4c35-42ae-add5-19372e1d8807-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.524361] env[68638]: DEBUG oslo_concurrency.lockutils [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] Lock 
"4db12faa-4c35-42ae-add5-19372e1d8807-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.524601] env[68638]: DEBUG oslo_concurrency.lockutils [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] Lock "4db12faa-4c35-42ae-add5-19372e1d8807-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.524771] env[68638]: DEBUG nova.compute.manager [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] No waiting events found dispatching network-vif-plugged-2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1214.525021] env[68638]: WARNING nova.compute.manager [req-08d5bd7d-971a-482e-9755-819c11a8b245 req-0e16658e-cf28-434a-9025-2dbf529377fe service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Received unexpected event network-vif-plugged-2e2b1e44-37eb-48ac-be57-6099eb8cc84f for instance with vm_state building and task_state spawning. [ 1214.567300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-736ed161-efa0-4ad6-969c-a638a72b5d29 tempest-ServerDiskConfigTestJSON-919905860 tempest-ServerDiskConfigTestJSON-919905860-project-member] Lock "d7fd30c6-3e0b-4564-9369-f29dc59a4d74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.934s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.571360] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Successfully updated port: 2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1214.572645] env[68638]: DEBUG oslo_vmware.api [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834583, 'name': PowerOnVM_Task, 'duration_secs': 0.499556} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.574592] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1214.574807] env[68638]: INFO nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Took 7.31 seconds to spawn the instance on the hypervisor. 
[ 1214.574980] env[68638]: DEBUG nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1214.575910] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362e8878-ecda-4491-94c2-cfe375583741 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.598387] env[68638]: DEBUG nova.compute.manager [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Received event network-changed-2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1214.598747] env[68638]: DEBUG nova.compute.manager [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Refreshing instance network info cache due to event network-changed-2e2b1e44-37eb-48ac-be57-6099eb8cc84f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1214.599715] env[68638]: DEBUG oslo_concurrency.lockutils [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] Acquiring lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.599977] env[68638]: DEBUG oslo_concurrency.lockutils [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] Acquired lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.600251] env[68638]: DEBUG nova.network.neutron [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Refreshing network info cache for port 2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1215.075329] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.094707] env[68638]: INFO nova.compute.manager [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Took 14.30 seconds to build instance. [ 1215.139230] env[68638]: DEBUG nova.network.neutron [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1215.174957] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1215.175231] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1215.177311] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0b8703-3401-4422-9eb5-36e5f921b18a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.184422] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1215.184771] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1215.184984] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3488ae96-89a6-4fc5-9c0f-e8275f31a171 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.316539] env[68638]: DEBUG nova.network.neutron [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.596499] env[68638]: DEBUG oslo_concurrency.lockutils [None req-58ec3c15-04dd-46fc-aa35-a87444eaa69d tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.814s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.620718] env[68638]: DEBUG oslo_vmware.rw_handles [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523a61c9-70ee-1f69-c9b5-cf910c2b9a93/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1215.620951] env[68638]: INFO nova.virt.vmwareapi.images [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Downloaded image file data 629cabe9-6521-40cd-a0d0-8bc57ab4999b [ 1215.621794] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19463fb-33e0-4ca3-90b8-7e1905d2ddfd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.638474] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc82e1be-2c03-4475-bec0-f2d9e5b49caf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.752114] env[68638]: INFO nova.virt.vmwareapi.images [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] The imported VM was unregistered [ 1215.754416] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1215.754647] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Creating directory with path [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1215.754914] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48c7ea87-8c2b-4084-b6bc-718cc8ae8a29 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.766350] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Created directory with path [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1215.766546] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0/OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0.vmdk to [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk. 
{{(pid=68638) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1215.766793] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c222d39c-26df-46ad-b661-c27f9dd366ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.772951] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1215.772951] env[68638]: value = "task-2834585" [ 1215.772951] env[68638]: _type = "Task" [ 1215.772951] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.780420] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.819235] env[68638]: DEBUG oslo_concurrency.lockutils [req-ea4c9958-b90d-4bc5-b95f-485c9ce0ec84 req-a01c6296-b40e-4f6d-9562-ebb85be35d6f service nova] Releasing lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.819657] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquired lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.819832] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1216.287132] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.643663] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1216.657262] env[68638]: DEBUG nova.compute.manager [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Received event network-changed-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1216.657262] env[68638]: DEBUG nova.compute.manager [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Refreshing instance network info cache due to event network-changed-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1216.657262] env[68638]: DEBUG oslo_concurrency.lockutils [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] Acquiring lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.657262] env[68638]: DEBUG oslo_concurrency.lockutils [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] Acquired lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.657262] env[68638]: DEBUG nova.network.neutron [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Refreshing network info cache for port 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1216.790402] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.914349] env[68638]: DEBUG nova.network.neutron [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Updating instance_info_cache with network_info: [{"id": "2e2b1e44-37eb-48ac-be57-6099eb8cc84f", "address": "fa:16:3e:6c:f6:ec", "network": {"id": "420d4e50-d33d-4709-b12e-e3d8f711e646", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-940403807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed0f3027384a4c8f3cf2e4ed0c6b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2b1e44-37", "ovs_interfaceid": "2e2b1e44-37eb-48ac-be57-6099eb8cc84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.290571] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.419608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Releasing lock "refresh_cache-4db12faa-4c35-42ae-add5-19372e1d8807" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.420174] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance network_info: |[{"id": "2e2b1e44-37eb-48ac-be57-6099eb8cc84f", "address": "fa:16:3e:6c:f6:ec", "network": {"id": "420d4e50-d33d-4709-b12e-e3d8f711e646", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-940403807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed0f3027384a4c8f3cf2e4ed0c6b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e2b1e44-37", "ovs_interfaceid": "2e2b1e44-37eb-48ac-be57-6099eb8cc84f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1217.420687] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:f6:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e2b1e44-37eb-48ac-be57-6099eb8cc84f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1217.429057] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Creating folder: Project (c7ed0f3027384a4c8f3cf2e4ed0c6b16). Parent ref: group-v569734. 
{{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.432383] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-759980ca-e4ac-402f-9398-03901d38a57f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.444786] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Created folder: Project (c7ed0f3027384a4c8f3cf2e4ed0c6b16) in parent group-v569734. [ 1217.445097] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Creating folder: Instances. Parent ref: group-v570056. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1217.445293] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32977656-469f-42f4-98c4-02d194a9323d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.456290] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Created folder: Instances in parent group-v570056. [ 1217.456543] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1217.456766] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1217.457228] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb9c6733-dd27-48de-ad8a-4e4b015fa6ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.477757] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1217.477757] env[68638]: value = "task-2834588" [ 1217.477757] env[68638]: _type = "Task" [ 1217.477757] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.488968] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834588, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.594795] env[68638]: DEBUG nova.network.neutron [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updated VIF entry in instance network info cache for port 7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1217.595235] env[68638]: DEBUG nova.network.neutron [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updating instance_info_cache with network_info: [{"id": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "address": "fa:16:3e:6c:d6:2d", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c25dd2c-4a", "ovs_interfaceid": "7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.790595] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.992383] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834588, 'name': CreateVM_Task, 'duration_secs': 0.346352} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.992570] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1217.993392] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.993621] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.994019] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1217.994305] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-998e0575-a1ee-4fcf-8985-486863667710 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.001912] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1218.001912] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52486ac2-cfe4-998b-4116-7ccff5831f04" [ 1218.001912] env[68638]: _type = "Task" [ 1218.001912] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.012338] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52486ac2-cfe4-998b-4116-7ccff5831f04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.052393] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.052669] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.098467] env[68638]: DEBUG oslo_concurrency.lockutils [req-098ace25-ba26-4c9a-b4db-428ad9a0ba61 req-f1765356-4efd-4e8f-bf7c-338314330e7e service nova] Releasing lock "refresh_cache-2452dd7a-5f16-4094-9407-59405eed572b" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.289830] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834585, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.407175} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.290152] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0/OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0.vmdk to [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk. 
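Editor's note: the long-running vCenter operations in this trace (MoveVirtualDisk_Task, CreateVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, …) all follow the same oslo.vmware pattern: invoke the API to obtain a task reference, then poll it until it reaches a terminal state, which is what produces the repeated "progress is N%" lines. A rough sketch of that pattern using oslo.vmware's public session API is below; the host, credentials and disk paths are placeholders, and the exact arguments Nova passes may differ.

```python
# Rough sketch of the invoke-then-poll pattern behind the
# "Task: {'id': task-..., 'name': MoveVirtualDisk_Task} progress is N%" lines.
# Host, credentials and datastore paths are placeholders, not taken from the log.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.test", "user", "secret",
    10,    # api_retry_count
    0.5)   # task_poll_interval: seconds between the progress polls seen above

vim = session.vim
task = session.invoke_api(
    vim, "MoveVirtualDisk_Task", vim.service_content.virtualDiskManager,
    sourceName="[datastore1] OSTACK_IMG_x/OSTACK_IMG_x.vmdk",
    sourceDatacenter=None,   # placeholder; the driver passes a datacenter moref
    destName="[datastore1] devstack-image-cache_base/x/x.vmdk",
    destDatacenter=None)

# Blocks, logging progress at task_poll_interval, until the task finishes
# or raises on error -- i.e. the "completed successfully" line above.
session.wait_for_task(task)
```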
[ 1218.290348] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Cleaning up location [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1218.290513] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_1fb05ba6-a2fb-4371-9785-6121ee5c71c0 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.290767] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d70a6e1a-ecb9-44a9-b7cc-d52259217839 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.296579] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1218.296579] env[68638]: value = "task-2834589" [ 1218.296579] env[68638]: _type = "Task" [ 1218.296579] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.304659] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.512674] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52486ac2-cfe4-998b-4116-7ccff5831f04, 'name': SearchDatastore_Task, 'duration_secs': 0.087465} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.512975] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.513235] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1218.513477] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.513624] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.513829] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.514093] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cba6a583-3dfe-430c-b780-a9022fed5e24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.522505] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.522653] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1218.523389] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-831b7a5c-194c-46e5-8e36-e677ef0068bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.528410] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1218.528410] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523e3a03-49d4-d99b-7d63-c4ec5ec7dfe3" [ 1218.528410] env[68638]: _type = "Task" [ 1218.528410] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.535839] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523e3a03-49d4-d99b-7d63-c4ec5ec7dfe3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.556089] env[68638]: DEBUG nova.compute.utils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1218.806217] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03591} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.806469] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.806594] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.806832] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk to [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1218.807100] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd99f5a9-90c9-4d42-a91d-ffdc00997254 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.814363] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1218.814363] env[68638]: value = "task-2834590" [ 1218.814363] env[68638]: _type = "Task" [ 1218.814363] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.821496] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.041309] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523e3a03-49d4-d99b-7d63-c4ec5ec7dfe3, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.042224] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f221cbb2-25dc-47db-b549-3a8b802720aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.050077] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1219.050077] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ca3814-01fe-816d-f573-e6be23ed020f" [ 1219.050077] env[68638]: _type = "Task" [ 1219.050077] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.062677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.063207] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ca3814-01fe-816d-f573-e6be23ed020f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.327111] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.563413] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ca3814-01fe-816d-f573-e6be23ed020f, 'name': SearchDatastore_Task, 'duration_secs': 0.0702} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.563711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.564023] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807/4db12faa-4c35-42ae-add5-19372e1d8807.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1219.564271] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b1cd49f-334e-48ff-a025-1c57d79feda1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.575517] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1219.575517] env[68638]: value = "task-2834591" [ 1219.575517] env[68638]: _type = "Task" [ 1219.575517] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.585266] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.827728] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.088020] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.129064] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.129358] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.129607] env[68638]: INFO nova.compute.manager [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Attaching volume b0403e3d-fdec-4ff2-9129-f47f1f78370d to /dev/sdb [ 1220.162521] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71534528-5e29-42c7-a518-5a1dda273428 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.171679] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ec9992-c20c-44c9-9880-4617ed821c7d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.187629] env[68638]: DEBUG nova.virt.block_device [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating existing volume attachment record: 7107e699-eb88-4189-ac89-4fe00ddad92a {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1220.327904] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.587556] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.828127] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.086406] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834591, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.327777] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834590, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.284089} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.328127] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/629cabe9-6521-40cd-a0d0-8bc57ab4999b/629cabe9-6521-40cd-a0d0-8bc57ab4999b.vmdk to [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.329060] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce31261a-4ed9-403b-bce2-be2cb340b550 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.353647] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1221.353995] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd4d8c86-c912-425c-92b5-eaefa7439aeb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.374350] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1221.374350] env[68638]: value = "task-2834593" [ 1221.374350] env[68638]: _type = "Task" [ 1221.374350] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.383786] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834593, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.586863] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834591, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.829642} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.587118] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807/4db12faa-4c35-42ae-add5-19372e1d8807.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.587339] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.587588] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2caee9ef-518d-41fc-84f6-30f00c4866f9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.595879] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1221.595879] env[68638]: value = "task-2834594" [ 1221.595879] env[68638]: _type = "Task" [ 1221.595879] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.603227] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834594, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.883943] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834593, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.104707] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.336032} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.105049] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1222.105871] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d38599c-6962-4b38-a9ff-2a5aa4562bc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.127707] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807/4db12faa-4c35-42ae-add5-19372e1d8807.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.127975] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eec190a5-8f4f-41e2-ada8-3563bb99f064 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.146684] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1222.146684] env[68638]: value = "task-2834595" [ 1222.146684] env[68638]: _type = "Task" [ 1222.146684] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.155901] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834595, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.384303] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834593, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.657238] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834595, 'name': ReconfigVM_Task, 'duration_secs': 0.297218} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.657476] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807/4db12faa-4c35-42ae-add5-19372e1d8807.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.658149] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90d4c35a-a5cf-47a4-9dbe-3a793efa4023 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.663969] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1222.663969] env[68638]: value = "task-2834597" [ 1222.663969] env[68638]: _type = "Task" [ 1222.663969] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.671131] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834597, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.884931] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834593, 'name': ReconfigVM_Task, 'duration_secs': 1.102219} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.885231] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5/25c35c36-71c9-48cd-b7e4-6293eef890e5.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.885855] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6736ed6c-a84d-470b-8af4-684bdf0798a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.891868] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1222.891868] env[68638]: value = "task-2834598" [ 1222.891868] env[68638]: _type = "Task" [ 1222.891868] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.899344] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834598, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.173926] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834597, 'name': Rename_Task, 'duration_secs': 0.132122} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.174222] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1223.174467] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63b445f5-7b4b-49af-8c30-5551d7e682a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.180416] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1223.180416] env[68638]: value = "task-2834599" [ 1223.180416] env[68638]: _type = "Task" [ 1223.180416] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.188017] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834599, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.401400] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834598, 'name': Rename_Task, 'duration_secs': 0.220958} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.401787] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1223.401911] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aeafd8d5-3b28-4906-b8cd-db6f182a99af {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.408617] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1223.408617] env[68638]: value = "task-2834600" [ 1223.408617] env[68638]: _type = "Task" [ 1223.408617] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.417674] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.690451] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834599, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.919081] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834600, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.190382] env[68638]: DEBUG oslo_vmware.api [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834599, 'name': PowerOnVM_Task, 'duration_secs': 0.638928} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.190650] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.190860] env[68638]: INFO nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Took 9.86 seconds to spawn the instance on the hypervisor. 
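The entries above show the oslo.vmware task-polling pattern end to end: a vCenter call returns a Task handle (e.g. value = "task-2834599"), wait_for_task then logs "progress is N%" at intervals, and _poll_task finally reports "completed successfully" along with a duration_secs. As a rough illustration of that poll-until-done loop only (not the actual oslo.vmware implementation), a minimal sketch could look like the following, where TaskInfo and get_task_info are hypothetical stand-ins rather than real oslo.vmware or vSphere API names:

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:                  # hypothetical stand-in for a vSphere TaskInfo object
    state: str                   # e.g. "running", "success", "error"
    progress: int                # 0-100
    error: str | None = None

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a task until it finishes; return its duration in seconds."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)            # hypothetical helper supplied by the caller
        if info.state == "success":
            return time.monotonic() - start       # analogous to 'duration_secs' in the log
        if info.state == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress}%")   # cf. the DEBUG poll lines above
        time.sleep(poll_interval)

In the log, PowerOnVM_Task for instance 4db12faa went from 0% to 66% to "completed successfully" with duration_secs 0.638928, which is exactly the progression such a loop would produce.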
[ 1224.191057] env[68638]: DEBUG nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1224.191826] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6d00d5-32db-4e23-8cef-b22d859e3813 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.419215] env[68638]: DEBUG oslo_vmware.api [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834600, 'name': PowerOnVM_Task, 'duration_secs': 0.522735} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.419580] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.551691] env[68638]: DEBUG nova.compute.manager [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1224.552669] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d650d6ce-c4c3-4b2f-aa91-4195ade463c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.709071] env[68638]: INFO nova.compute.manager [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Took 19.77 seconds to build instance. [ 1224.735022] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1224.735022] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570059', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'name': 'volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e2e74700-aa83-484a-a61f-9f98a6019fdb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'serial': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1224.735749] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b912d56-0c98-456e-aaa2-f2dd28b88502 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.752346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0380cc-1726-47ac-b1f1-ce546411ce6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.776386] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d/volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1224.776755] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1d6750c-2eb1-4897-8ce0-916dd97b0b38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.794504] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1224.794504] env[68638]: value = "task-2834601" [ 1224.794504] env[68638]: _type = "Task" [ 1224.794504] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.803246] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834601, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.069679] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ba1a9630-34e3-4016-bbc4-d7973a75c3f5 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.536s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.210888] env[68638]: DEBUG oslo_concurrency.lockutils [None req-49ec6a72-0818-4254-b390-bac68f4e8119 tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.280s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.304954] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834601, 'name': ReconfigVM_Task, 'duration_secs': 0.363536} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.305249] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d/volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1225.310137] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b25b49c1-17e0-4c69-ba34-2ba8a604b081 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.324902] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1225.324902] env[68638]: value = "task-2834602" [ 1225.324902] env[68638]: _type = "Task" [ 1225.324902] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.332870] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834602, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.835514] env[68638]: DEBUG oslo_vmware.api [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834602, 'name': ReconfigVM_Task, 'duration_secs': 0.138012} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.835842] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570059', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'name': 'volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e2e74700-aa83-484a-a61f-9f98a6019fdb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'serial': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1226.025029] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "4db12faa-4c35-42ae-add5-19372e1d8807" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.025303] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.025489] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "4db12faa-4c35-42ae-add5-19372e1d8807-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.025674] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.025872] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.028084] env[68638]: INFO nova.compute.manager [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 
4db12faa-4c35-42ae-add5-19372e1d8807] Terminating instance [ 1226.531881] env[68638]: DEBUG nova.compute.manager [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1226.532191] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1226.533238] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50afc89-24a5-4100-b377-0c84f55aa30d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.541579] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1226.541870] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5645b501-2b72-42ac-b738-d5419f2a76ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.548536] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1226.548536] env[68638]: value = "task-2834603" [ 1226.548536] env[68638]: _type = "Task" [ 1226.548536] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.558765] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.873408] env[68638]: DEBUG nova.objects.instance [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid e2e74700-aa83-484a-a61f-9f98a6019fdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.060225] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834603, 'name': PowerOffVM_Task, 'duration_secs': 0.176832} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.060520] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1227.060704] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1227.060945] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a45d6354-db7d-41f5-a518-8a5c630fcb15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.127610] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.127884] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.128097] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Deleting the datastore file [datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.128392] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b824188f-7306-4fb5-8380-eaccd1acf9a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.134611] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for the task: (returnval){ [ 1227.134611] env[68638]: value = "task-2834605" [ 1227.134611] env[68638]: _type = "Task" [ 1227.134611] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.142040] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.378172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-985a7c40-627b-4307-96e2-c613c7f7892a tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.249s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.605652] env[68638]: DEBUG oslo_concurrency.lockutils [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.605965] env[68638]: DEBUG oslo_concurrency.lockutils [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.643728] env[68638]: DEBUG oslo_vmware.api [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Task: {'id': task-2834605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136988} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.643934] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1227.644131] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1227.644311] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1227.644483] env[68638]: INFO nova.compute.manager [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Took 1.11 seconds to destroy the instance on the hypervisor. 
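The teardown of instance 4db12faa-4c35-42ae-add5-19372e1d8807 above follows a fixed order: check power state, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on the instance directory, then network deallocation. The sketch below mirrors only that ordering as recorded in the log; the session wrapper and its methods (find_vm, call, wait_for_task, deallocate_network) are illustrative names, not Nova's driver API:

def destroy_instance(session, instance_uuid: str, datastore: str) -> None:
    """Sketch of the teardown order recorded in the log (hypothetical session API)."""
    vm_ref = session.find_vm(instance_uuid)             # hypothetical lookup by instance UUID
    power_off = session.call("PowerOffVM_Task", vm_ref)
    session.wait_for_task(power_off)                    # log: PowerOffVM_Task completed

    session.call("UnregisterVM", vm_ref)                # log: "Unregistered the VM" (no task returned)

    # Delete the instance directory, e.g. "[datastore1] 4db12faa-4c35-42ae-add5-19372e1d8807"
    delete = session.call("DeleteDatastoreFile_Task", f"[{datastore}] {instance_uuid}")
    session.wait_for_task(delete)                       # log: DeleteDatastoreFile_Task completed

    session.deallocate_network(instance_uuid)           # hypothetical; mirrors the Neutron
                                                        # deallocate_for_instance() step above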
[ 1227.644733] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.644901] env[68638]: DEBUG nova.compute.manager [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1227.644996] env[68638]: DEBUG nova.network.neutron [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1227.871604] env[68638]: DEBUG nova.compute.manager [req-8570616a-8d3e-4611-adb6-5046c94045ac req-905b2fc9-d695-40a8-aad6-bf3e53bbc09b service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Received event network-vif-deleted-2e2b1e44-37eb-48ac-be57-6099eb8cc84f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1227.871856] env[68638]: INFO nova.compute.manager [req-8570616a-8d3e-4611-adb6-5046c94045ac req-905b2fc9-d695-40a8-aad6-bf3e53bbc09b service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Neutron deleted interface 2e2b1e44-37eb-48ac-be57-6099eb8cc84f; detaching it from the instance and deleting it from the info cache [ 1227.872483] env[68638]: DEBUG nova.network.neutron [req-8570616a-8d3e-4611-adb6-5046c94045ac req-905b2fc9-d695-40a8-aad6-bf3e53bbc09b service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.108674] env[68638]: INFO nova.compute.manager [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Detaching volume b0403e3d-fdec-4ff2-9129-f47f1f78370d [ 1228.146872] env[68638]: INFO nova.virt.block_device [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Attempting to driver detach volume b0403e3d-fdec-4ff2-9129-f47f1f78370d from mountpoint /dev/sdb [ 1228.147235] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1228.147506] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570059', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'name': 'volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e2e74700-aa83-484a-a61f-9f98a6019fdb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'serial': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1228.148769] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b77f2a5-6a29-4e92-ab11-c12a67532498 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.171766] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477eec3d-8dc0-49ed-b07b-57f29c4c18cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.178626] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404205a9-51e9-4e59-8902-3bc574e891b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.197993] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db03c69-96fb-4627-81ab-9730600e8b7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.211834] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] The volume has not been displaced from its original location: [datastore1] volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d/volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d.vmdk. No consolidation needed. 
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1228.216934] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1228.217194] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34e16389-01b0-445e-b834-65cc63a064a4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.234753] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1228.234753] env[68638]: value = "task-2834606" [ 1228.234753] env[68638]: _type = "Task" [ 1228.234753] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.241782] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834606, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.352478] env[68638]: DEBUG nova.network.neutron [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.374906] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-025eeec7-baa9-4277-afec-3fb711a18140 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.384737] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8d0efb-d47a-4ae2-8bbf-db23fde6528c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.413906] env[68638]: DEBUG nova.compute.manager [req-8570616a-8d3e-4611-adb6-5046c94045ac req-905b2fc9-d695-40a8-aad6-bf3e53bbc09b service nova] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Detach interface failed, port_id=2e2b1e44-37eb-48ac-be57-6099eb8cc84f, reason: Instance 4db12faa-4c35-42ae-add5-19372e1d8807 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1228.744108] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834606, 'name': ReconfigVM_Task, 'duration_secs': 0.30072} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.744344] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1228.748824] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4caba96-4b17-4137-9bfb-6da1a2fe5e5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.763115] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1228.763115] env[68638]: value = "task-2834607" [ 1228.763115] env[68638]: _type = "Task" [ 1228.763115] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.770071] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834607, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.855808] env[68638]: INFO nova.compute.manager [-] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Took 1.21 seconds to deallocate network for instance. [ 1229.272695] env[68638]: DEBUG oslo_vmware.api [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834607, 'name': ReconfigVM_Task, 'duration_secs': 0.126208} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.273090] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570059', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'name': 'volume-b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e2e74700-aa83-484a-a61f-9f98a6019fdb', 'attached_at': '', 'detached_at': '', 'volume_id': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d', 'serial': 'b0403e3d-fdec-4ff2-9129-f47f1f78370d'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1229.362455] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.362734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.362956] env[68638]: DEBUG nova.objects.instance [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lazy-loading 'resources' on Instance uuid 4db12faa-4c35-42ae-add5-19372e1d8807 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.812446] env[68638]: DEBUG nova.objects.instance [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'flavor' on Instance uuid e2e74700-aa83-484a-a61f-9f98a6019fdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.944610] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039b7378-028b-4b18-85a4-5683776ef12d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.951970] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80f14ee-541a-4515-91c2-4f2dd74a57ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.981971] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa7d826-1058-4362-93b0-57af80236ef0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.988894] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2ac6b3-bf06-448f-a7eb-8bcf24417b66 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.002710] env[68638]: DEBUG nova.compute.provider_tree [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.505670] env[68638]: DEBUG nova.scheduler.client.report [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.819838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-edd15cca-e86e-49aa-9129-a60ef6fa4bfe tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.010105] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.027849] env[68638]: INFO nova.scheduler.client.report [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Deleted allocations for instance 4db12faa-4c35-42ae-add5-19372e1d8807 [ 1231.535412] env[68638]: DEBUG oslo_concurrency.lockutils [None req-44e3fe4c-391d-4e0c-aecc-51dd093d4a4e tempest-ServerAddressesTestJSON-1875074317 tempest-ServerAddressesTestJSON-1875074317-project-member] Lock "4db12faa-4c35-42ae-add5-19372e1d8807" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.510s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.851568] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.851824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock 
"e2e74700-aa83-484a-a61f-9f98a6019fdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.852054] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.852250] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.852428] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.854463] env[68638]: INFO nova.compute.manager [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Terminating instance [ 1232.358692] env[68638]: DEBUG nova.compute.manager [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1232.358930] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1232.359874] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a300c33-c6f6-4368-b500-1b83f274358a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.368092] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1232.368328] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b69152a1-1316-47a7-966c-151b71b49e42 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.374684] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1232.374684] env[68638]: value = "task-2834608" [ 1232.374684] env[68638]: _type = "Task" [ 1232.374684] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.384385] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834608, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.884806] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834608, 'name': PowerOffVM_Task, 'duration_secs': 0.221336} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.885137] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1232.885317] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1232.885570] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-417d0821-0877-49cb-9c32-0a9fd9e59845 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.889243] env[68638]: INFO nova.compute.manager [None req-99dfe8c5-6838-4af4-b1ee-de07fd9503a5 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Get console output [ 1232.889704] env[68638]: WARNING nova.virt.vmwareapi.driver [None req-99dfe8c5-6838-4af4-b1ee-de07fd9503a5 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] The console log is missing. Check your VSPC configuration [ 1232.957801] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1232.958200] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1232.958511] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleting the datastore file [datastore2] e2e74700-aa83-484a-a61f-9f98a6019fdb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1232.958879] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ba03012-0d0c-4afd-91f2-c8d33b81a4f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.966245] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for the task: (returnval){ [ 1232.966245] env[68638]: value = "task-2834610" [ 1232.966245] env[68638]: _type = "Task" [ 1232.966245] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.978111] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.481585] env[68638]: DEBUG oslo_vmware.api [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Task: {'id': task-2834610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141634} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.482258] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1233.482258] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1233.482258] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1233.482451] env[68638]: INFO nova.compute.manager [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1233.482696] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
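Note: network deallocation above is driven through an oslo.service looping call ("Waiting for function ... _deallocate_network_with_retries to return"). A minimal sketch of that looping-call pattern follows, assuming a hypothetical deallocate() callable that may raise transient errors; it is illustrative only, not Nova's retry policy.

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate, max_attempts=3):
        attempts = 0

        def _try_once():
            nonlocal attempts
            attempts += 1
            try:
                deallocate()
            except Exception:
                if attempts >= max_attempts:
                    raise          # give up; the exception surfaces from wait()
                return             # try again on the next interval
            # Success: stop the loop and hand a value back to .wait()
            raise loopingcall.LoopingCallDone(retvalue=True)

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        return timer.start(interval=1.0).wait()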
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1233.482778] env[68638]: DEBUG nova.compute.manager [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1233.483639] env[68638]: DEBUG nova.network.neutron [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1233.978444] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.978764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.979021] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.979278] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.979482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.982325] env[68638]: INFO nova.compute.manager [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Terminating instance [ 1233.992267] env[68638]: DEBUG nova.compute.manager [req-a5148283-76b4-410a-96e1-33d89ec38da3 req-7782e6b3-ac73-405c-8eb9-ded3e4cbe6b3 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Received event network-vif-deleted-da4e63a0-6fb2-436b-b720-8c1e1b21decf {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1233.992516] env[68638]: 
INFO nova.compute.manager [req-a5148283-76b4-410a-96e1-33d89ec38da3 req-7782e6b3-ac73-405c-8eb9-ded3e4cbe6b3 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Neutron deleted interface da4e63a0-6fb2-436b-b720-8c1e1b21decf; detaching it from the instance and deleting it from the info cache [ 1233.993068] env[68638]: DEBUG nova.network.neutron [req-a5148283-76b4-410a-96e1-33d89ec38da3 req-7782e6b3-ac73-405c-8eb9-ded3e4cbe6b3 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.476168] env[68638]: DEBUG nova.network.neutron [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.485924] env[68638]: DEBUG nova.compute.manager [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1234.486187] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.487143] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c3f35e-14a4-40e4-bdca-62d01f853807 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.496186] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1234.496405] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30173293-25a6-40f2-8de6-45db02b3156a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.498090] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb0f878b-0e5c-4075-9024-3ca9d6af930e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.506186] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae65c9b7-1957-4a68-b83d-758371af7bc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.517912] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1234.517912] env[68638]: value = "task-2834611" [ 1234.517912] env[68638]: _type = "Task" [ 1234.517912] env[68638]: } to complete. 
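Note: the terminate path above runs under per-instance oslo.concurrency locks (the "53e92f51-..." and "53e92f51-...-events" acquisitions). The sketch below shows the two lockutils primitives involved; the lock names are illustrative rather than the exact strings Nova composes.

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # Decorator form: serialize all terminations of the same instance.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # ... shutdown, destroy, deallocate ...
            pass
        do_terminate_instance()

    def clear_events(instance_uuid):
        # Context-manager form, e.g. for the "<uuid>-events" lock.
        with lockutils.lock('%s-events' % instance_uuid):
            # ... drop any queued external events for this instance ...
            pass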
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.525679] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834611, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.539543] env[68638]: DEBUG nova.compute.manager [req-a5148283-76b4-410a-96e1-33d89ec38da3 req-7782e6b3-ac73-405c-8eb9-ded3e4cbe6b3 service nova] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Detach interface failed, port_id=da4e63a0-6fb2-436b-b720-8c1e1b21decf, reason: Instance e2e74700-aa83-484a-a61f-9f98a6019fdb could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1234.979078] env[68638]: INFO nova.compute.manager [-] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Took 1.50 seconds to deallocate network for instance. [ 1235.028818] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834611, 'name': PowerOffVM_Task, 'duration_secs': 0.199795} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.029214] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.029410] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.029669] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8f5dc86-da52-4415-a93d-482a747a8337 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.100096] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.100342] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.100525] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleting the datastore file [datastore2] 53e92f51-9010-4fb2-89e1-9d16a252ef6e {{(pid=68638) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.100780] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce733294-4fb3-45d7-bb1d-00e07fabc92c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.106969] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1235.106969] env[68638]: value = "task-2834613" [ 1235.106969] env[68638]: _type = "Task" [ 1235.106969] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.114750] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834613, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.485841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.486178] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.486409] env[68638]: DEBUG nova.objects.instance [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lazy-loading 'resources' on Instance uuid e2e74700-aa83-484a-a61f-9f98a6019fdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.616640] env[68638]: DEBUG oslo_vmware.api [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834613, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131595} completed successfully. 
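Note: both teardowns logged here follow the same ordering: power off the VM, unregister it from vCenter, delete its datastore directory, and only then deallocate the Neutron resources. A compressed sketch of that ordering, with hypothetical callables standing in for the vmops/ds_util calls:

    def destroy_instance(power_off, unregister, delete_datastore_dir, deallocate_network):
        """Tear an instance down in the order visible in the log above.

        All four parameters are hypothetical callables; in practice each
        should tolerate already-missing state so retries do not fail.
        """
        power_off()              # PowerOffVM_Task
        unregister()             # UnregisterVM
        delete_datastore_dir()   # DeleteDatastoreFile_Task on [datastore2] <uuid>
        deallocate_network()     # deallocate_for_instance() via Neutron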
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.616898] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.617098] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1235.617279] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1235.617453] env[68638]: INFO nova.compute.manager [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1235.617683] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1235.617871] env[68638]: DEBUG nova.compute.manager [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1235.618015] env[68638]: DEBUG nova.network.neutron [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.040396] env[68638]: DEBUG nova.compute.manager [req-de2e78f6-19ba-4419-80e8-04f8ef2aff6b req-969316f2-08c8-4a95-88dc-03852bab3c27 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Received event network-vif-deleted-a087b668-2b77-40e4-8a37-af9d56aa8b57 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1236.040684] env[68638]: INFO nova.compute.manager [req-de2e78f6-19ba-4419-80e8-04f8ef2aff6b req-969316f2-08c8-4a95-88dc-03852bab3c27 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Neutron deleted interface a087b668-2b77-40e4-8a37-af9d56aa8b57; detaching it from the instance and deleting it from the info cache [ 1236.040919] env[68638]: DEBUG nova.network.neutron [req-de2e78f6-19ba-4419-80e8-04f8ef2aff6b req-969316f2-08c8-4a95-88dc-03852bab3c27 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.070738] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76106d2-ce58-422d-b45f-8267d6fdc12f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.078097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a6bb65-2cf2-4f97-a4e8-4afb4471d8a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.111683] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be8d573-91e0-4106-8093-e11a141bf947 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.118867] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855c4257-f982-45d6-9087-8506b0f1283b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.131567] env[68638]: DEBUG nova.compute.provider_tree [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.520031] env[68638]: DEBUG nova.network.neutron [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.543141] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46e6bbf3-f140-4c3f-bdc9-cbef049668ce {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.553950] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc5d550-c195-4d58-a649-67f1fa629997 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.582328] env[68638]: DEBUG nova.compute.manager [req-de2e78f6-19ba-4419-80e8-04f8ef2aff6b req-969316f2-08c8-4a95-88dc-03852bab3c27 service nova] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Detach interface failed, port_id=a087b668-2b77-40e4-8a37-af9d56aa8b57, reason: Instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1236.634657] env[68638]: DEBUG nova.scheduler.client.report [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.024053] env[68638]: INFO nova.compute.manager [-] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Took 1.40 seconds to deallocate network for instance. [ 1237.139845] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.654s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.163563] env[68638]: INFO nova.scheduler.client.report [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Deleted allocations for instance e2e74700-aa83-484a-a61f-9f98a6019fdb [ 1237.530067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.530454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.530722] env[68638]: DEBUG nova.objects.instance [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'resources' on Instance uuid 53e92f51-9010-4fb2-89e1-9d16a252ef6e 
{{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.671604] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d09979ff-fd0b-492f-b20f-d1b4fa07b01c tempest-AttachVolumeNegativeTest-754123378 tempest-AttachVolumeNegativeTest-754123378-project-member] Lock "e2e74700-aa83-484a-a61f-9f98a6019fdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.820s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.091401] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e68235-02c2-4d72-9c25-2a23093070e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.099297] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff930db-fd6e-4f1b-8346-56ddf10fe2f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.130260] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5d5769-3775-46fe-908e-6a89eb29598f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.137425] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce77e8c-1fdd-4e27-a9f0-2a926b2893a6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.150561] env[68638]: DEBUG nova.compute.provider_tree [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.656768] env[68638]: DEBUG nova.scheduler.client.report [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1239.161944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.182606] env[68638]: INFO nova.scheduler.client.report [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted allocations for instance 53e92f51-9010-4fb2-89e1-9d16a252ef6e [ 1239.690384] env[68638]: DEBUG 
oslo_concurrency.lockutils [None req-dde74063-fb9e-4e7e-aa21-c04c0abb8a18 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "53e92f51-9010-4fb2-89e1-9d16a252ef6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.712s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.928039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.928039] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.430257] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1242.952896] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.952896] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.954457] env[68638]: INFO nova.compute.claims [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.167452] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.167664] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.167822] 
env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.167991] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.168173] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.168322] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.168471] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.168614] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1243.340374] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.842998] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.018469] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf1285f-8417-4e4b-b6a7-8f86cf079799 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.025801] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63822e83-2dc0-4de0-bdcc-2406a7a4217e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.054609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69a97ef-b73f-466a-aa8e-53dee1e0d0d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.061266] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f29aa9-8195-4039-848c-ebf220106410 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.073583] env[68638]: DEBUG nova.compute.provider_tree [None req-d013b849-4108-44bd-9156-7473e26e7763 
tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.577093] env[68638]: DEBUG nova.scheduler.client.report [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1245.081830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.082385] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Start building networks asynchronously for instance. 
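Note: the inventory payload reported above is the placement view of this node. The small computation below translates it into schedulable capacity using the usual placement rule of thumb, capacity roughly equal to (total - reserved) * allocation_ratio with max_unit capping a single allocation; the numbers are taken directly from the logged inventory.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 169},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, 'schedulable:', capacity, 'per-allocation cap:', inv['max_unit'])
    # VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0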
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1245.085142] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.242s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.085334] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.085550] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1245.086646] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ce6de8-dfb8-460c-b3a8-663758b6415b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.095332] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcfd393-a050-4e89-afea-7372a286dadc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.111372] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5aa82a-7c72-4065-8e41-764da21e98b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.118096] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3604298e-1257-43c5-b298-a91359692ac6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.146944] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180475MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1245.147090] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.147300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.588698] env[68638]: DEBUG nova.compute.utils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1245.590128] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1245.590299] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1245.635276] env[68638]: DEBUG nova.policy [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1245.907929] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Successfully created port: d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.093383] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1246.172221] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1246.172466] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2452dd7a-5f16-4094-9407-59405eed572b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1246.172643] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 25c35c36-71c9-48cd-b7e4-6293eef890e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1246.172815] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e6bb1034-e440-4fb2-ba56-a734c4f67cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1246.173058] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1246.173261] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1246.227718] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e254a5-899f-4543-966f-08b18ed30653 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.235359] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3baa57-c6f3-4541-85c0-2d0f45d13c11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.266601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f815d4a-185c-48aa-9792-a3a25f2f4079 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.273236] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c431cf45-0b6c-40a0-be3e-cac86ed501f1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.285510] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.788431] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1247.104159] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Start spawning the instance on the hypervisor. 
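Note: the "Final resource view" above is just the reserved host memory plus the four per-instance allocations listed a few records earlier (each {DISK_GB: 1, MEMORY_MB: 192, VCPU: 1}). Worked out explicitly:

    reserved_host_memory_mb = 512            # MEMORY_MB 'reserved' in the inventory
    instances = 4                            # the four allocations listed above
    per_instance = {'MEMORY_MB': 192, 'DISK_GB': 1, 'VCPU': 1}

    used_ram_mb = reserved_host_memory_mb + instances * per_instance['MEMORY_MB']   # 1280
    used_disk_gb = instances * per_instance['DISK_GB']                              # 4
    used_vcpus = instances * per_instance['VCPU']                                   # 4
    print(used_ram_mb, used_disk_gb, used_vcpus)
    # matches used_ram=1280MB used_disk=4GB used_vcpus=4 in the log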
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1247.131124] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1247.131389] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.131551] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.131735] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.131882] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.132041] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1247.132266] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1247.132421] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1247.132590] 
env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1247.132752] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1247.132922] env[68638]: DEBUG nova.virt.hardware [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1247.133797] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67689cc8-36e1-45e3-9173-9182ec7750b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.141625] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddfcbb4-8b7a-430b-89b2-c2c176022d07 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.277177] env[68638]: DEBUG nova.compute.manager [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-vif-plugged-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1247.277390] env[68638]: DEBUG oslo_concurrency.lockutils [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.277593] env[68638]: DEBUG oslo_concurrency.lockutils [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.277759] env[68638]: DEBUG oslo_concurrency.lockutils [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.277925] env[68638]: DEBUG nova.compute.manager [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] No waiting events found dispatching network-vif-plugged-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1247.278152] env[68638]: WARNING 
nova.compute.manager [req-c53b61bd-73ac-4855-ad26-1c8a5392b9cf req-e2fc3352-b7b8-4465-aa32-b8614c02c80d service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received unexpected event network-vif-plugged-d0d7d883-914c-4c61-b0d1-3a31b2df0943 for instance with vm_state building and task_state spawning. [ 1247.292742] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1247.292914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.146s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.352280] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Successfully updated port: d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1247.855323] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.855649] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.855649] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.406144] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Instance cache missing network info. 
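Note: the network-vif-plugged event above arrives before anything is waiting for it, hence "No waiting events found dispatching ..." followed by the "Received unexpected event ..." warning. The toy registry below only illustrates that register-then-pop shape; it is not Nova's InstanceEvents implementation.

    import threading

    class InstanceEvents:
        """Toy per-instance event table: pop either wakes a registered
        waiter or reports the event as unexpected."""

        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                return False     # "No waiting events found dispatching ..."
            ev.set()
            return True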
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1248.611488] env[68638]: DEBUG nova.network.neutron [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.114270] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.114623] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Instance network_info: |[{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1249.115066] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:cd:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0d7d883-914c-4c61-b0d1-3a31b2df0943', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.122757] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1249.123008] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1249.124231] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-068f7ff1-1484-47b5-a3dd-0157a886e488 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.146222] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.146222] env[68638]: value = "task-2834617" [ 1249.146222] env[68638]: _type = "Task" [ 1249.146222] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.153117] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834617, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.307599] env[68638]: DEBUG nova.compute.manager [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1249.307599] env[68638]: DEBUG nova.compute.manager [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1249.307599] env[68638]: DEBUG oslo_concurrency.lockutils [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.307599] env[68638]: DEBUG oslo_concurrency.lockutils [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.307599] env[68638]: DEBUG nova.network.neutron [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.654817] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834617, 'name': CreateVM_Task, 'duration_secs': 0.336681} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.655218] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1249.655629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.655802] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.656162] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1249.656457] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46072831-c580-439b-9ae1-5f9c9334990e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.660796] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1249.660796] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]525c6646-bd53-a711-eecf-1cb134cbff86" [ 1249.660796] env[68638]: _type = "Task" [ 1249.660796] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.668470] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525c6646-bd53-a711-eecf-1cb134cbff86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.007817] env[68638]: DEBUG nova.network.neutron [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1250.007817] env[68638]: DEBUG nova.network.neutron [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.171198] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]525c6646-bd53-a711-eecf-1cb134cbff86, 'name': SearchDatastore_Task, 'duration_secs': 0.009173} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.171454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.171673] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1250.171906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.172065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.172248] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.172499] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0a70c75-92d3-423d-bf98-a31728ee0a33 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.180077] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.180280] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1250.180932] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbbee6ec-e737-4633-8088-9933a1dd9553 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.185883] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1250.185883] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52506aeb-3321-a8be-c846-c694c241533a" [ 1250.185883] env[68638]: _type = "Task" [ 1250.185883] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.193494] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52506aeb-3321-a8be-c846-c694c241533a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.510052] env[68638]: DEBUG oslo_concurrency.lockutils [req-01f9d977-d8a3-49c3-ad4e-929cd5dd8d42 req-a391b0ad-2f89-4a43-b395-2024f4bffb7e service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.698359] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52506aeb-3321-a8be-c846-c694c241533a, 'name': SearchDatastore_Task, 'duration_secs': 0.008604} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.699189] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8988c4fb-4de3-4201-abbc-0aeaccef9fe4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.704307] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1250.704307] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cbb1a0-4cf5-bc07-3e08-5612de10cb5a" [ 1250.704307] env[68638]: _type = "Task" [ 1250.704307] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.711791] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbb1a0-4cf5-bc07-3e08-5612de10cb5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.214467] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cbb1a0-4cf5-bc07-3e08-5612de10cb5a, 'name': SearchDatastore_Task, 'duration_secs': 0.008799} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.214724] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.214976] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] e6bb1034-e440-4fb2-ba56-a734c4f67cdb/e6bb1034-e440-4fb2-ba56-a734c4f67cdb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.215247] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18899de0-921f-40c3-8095-68cd8d621af1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.221978] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1251.221978] env[68638]: value = "task-2834618" [ 1251.221978] env[68638]: _type = "Task" [ 1251.221978] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.230291] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.731372] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453987} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.731754] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] e6bb1034-e440-4fb2-ba56-a734c4f67cdb/e6bb1034-e440-4fb2-ba56-a734c4f67cdb.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1251.731869] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.732070] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98ac6c1b-8931-48ca-8e93-b98da244595e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.738585] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1251.738585] env[68638]: value = "task-2834619" [ 1251.738585] env[68638]: _type = "Task" [ 1251.738585] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.745438] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.248058] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061521} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.248376] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1252.249195] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f44618-9d14-4635-9280-93adfde18be1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.270857] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] e6bb1034-e440-4fb2-ba56-a734c4f67cdb/e6bb1034-e440-4fb2-ba56-a734c4f67cdb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1252.271101] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d5c02a5-949c-4da1-8908-411db9f1445a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.290163] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1252.290163] env[68638]: value = "task-2834620" [ 1252.290163] env[68638]: _type = "Task" [ 1252.290163] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.297233] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834620, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.799904] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834620, 'name': ReconfigVM_Task, 'duration_secs': 0.27583} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.800372] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfigured VM instance instance-00000079 to attach disk [datastore1] e6bb1034-e440-4fb2-ba56-a734c4f67cdb/e6bb1034-e440-4fb2-ba56-a734c4f67cdb.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.800831] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97206dea-6d3f-4477-8ddc-4d7dd9b7fd97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.807465] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1252.807465] env[68638]: value = "task-2834621" [ 1252.807465] env[68638]: _type = "Task" [ 1252.807465] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.814863] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834621, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.316688] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834621, 'name': Rename_Task, 'duration_secs': 0.123678} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.316960] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1253.317220] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5afbde9-21d2-4b0b-b9bc-f6e76f0021ec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.323124] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1253.323124] env[68638]: value = "task-2834622" [ 1253.323124] env[68638]: _type = "Task" [ 1253.323124] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.330169] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.832959] env[68638]: DEBUG oslo_vmware.api [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834622, 'name': PowerOnVM_Task, 'duration_secs': 0.429234} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.833311] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1253.833522] env[68638]: INFO nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Took 6.73 seconds to spawn the instance on the hypervisor. [ 1253.833700] env[68638]: DEBUG nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1253.834477] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c76c2f-e0d0-4035-9573-5fb71526af98 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.350876] env[68638]: INFO nova.compute.manager [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Took 11.42 seconds to build instance. 
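The spawn recorded above is driven as a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each submitted and then polled through the oslo.vmware helpers named in the log (wait_for_task / _poll_task). The sketch below is illustrative only and is not part of the captured log: the vCenter host, user, password and the start_task callable are placeholder assumptions, and it uses only VMwareAPISession calls that the log itself references (wait_for_task) plus the session's logout().

# Illustrative sketch only (not from this deployment). It mirrors the
# task-polling pattern seen in the wait_for_task/_poll_task entries above:
# submit a vCenter task, poll its progress (0% ... 99% ... completed
# successfully), and raise if the task ends in an error state.
from oslo_vmware import api


def run_vcenter_task(host, user, password, start_task):
    """Run one vCenter task and block until it completes.

    host/user/password are placeholders; start_task is any callable that
    takes the session, invokes the API (e.g. a CreateVM_Task call) and
    returns the task moref to wait on.
    """
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    try:
        task_ref = start_task(session)
        # wait_for_task() is the same helper the log's _poll_task lines
        # come from; it returns the task info on success.
        return session.wait_for_task(task_ref)
    finally:
        session.logout()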
[ 1254.406314] env[68638]: INFO nova.compute.manager [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Rebuilding instance [ 1254.449896] env[68638]: DEBUG nova.compute.manager [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1254.450911] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638991f2-d141-40f0-b081-fb8317c19714 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.855622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-d013b849-4108-44bd-9156-7473e26e7763 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.928s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.934970] env[68638]: DEBUG nova.compute.manager [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1254.935289] env[68638]: DEBUG nova.compute.manager [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1254.935611] env[68638]: DEBUG oslo_concurrency.lockutils [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.935815] env[68638]: DEBUG oslo_concurrency.lockutils [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.936030] env[68638]: DEBUG nova.network.neutron [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1255.464209] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1255.464500] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1907fb30-3450-4ffc-b3fe-8b1b4d5cd276 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.471859] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1255.471859] env[68638]: value = "task-2834623" [ 1255.471859] env[68638]: _type = "Task" [ 1255.471859] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.481239] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.669715] env[68638]: DEBUG nova.network.neutron [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1255.670111] env[68638]: DEBUG nova.network.neutron [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.817333] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.817570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.982062] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834623, 'name': PowerOffVM_Task, 'duration_secs': 0.198366} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.982062] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1255.982062] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1255.982734] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db71ae5-af63-4799-9ecb-a735d37a6f81 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.988970] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1255.989200] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74483e9c-5b6d-47bb-a5bb-b54eb1979341 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.049704] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1256.049937] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1256.050144] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleting the datastore file [datastore1] 2452dd7a-5f16-4094-9407-59405eed572b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1256.050412] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aca80a4d-cf9f-4224-a471-06b0c941402c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.057055] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1256.057055] env[68638]: value = "task-2834625" [ 1256.057055] env[68638]: _type = "Task" [ 1256.057055] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.063972] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.172753] env[68638]: DEBUG oslo_concurrency.lockutils [req-a3337e98-4428-42c3-b446-37aad1956705 req-d4d7d558-05ea-4039-85d9-76a70166f66f service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.319777] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1256.566790] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202659} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.567056] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1256.567219] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1256.567403] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1256.844350] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.844676] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.846255] env[68638]: INFO nova.compute.claims [None 
req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1257.602129] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1257.602683] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1257.602683] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1257.602683] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1257.603030] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1257.603030] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1257.604271] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1257.604271] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1257.604271] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1257.604271] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1257.604271] env[68638]: DEBUG nova.virt.hardware [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1257.604729] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6d6d52-776a-4a93-8313-56e02664ca1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.612697] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea85ae96-5863-4d87-9863-801a522862cd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.628482] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:d6:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1257.636198] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1257.636462] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1257.636677] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3d610b2-4131-4536-b3a1-4792d7df1ad2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.656667] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1257.656667] env[68638]: value = "task-2834626" [ 1257.656667] env[68638]: _type = "Task" [ 1257.656667] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.664355] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834626, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.922629] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e74f46-7502-4864-8d02-ac9345431746 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.930454] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbe73e5-2907-4d2c-9cac-b6cb99673b6d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.960793] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fcf876-165a-4222-9535-4bc1d4d67cd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.967784] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a7667d-19d4-487d-bf01-2b3f3846b0a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.980800] env[68638]: DEBUG nova.compute.provider_tree [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.165753] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834626, 'name': CreateVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.287783] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1258.484384] env[68638]: DEBUG nova.scheduler.client.report [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1258.666310] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834626, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.989345] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.989885] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1259.166917] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834626, 'name': CreateVM_Task, 'duration_secs': 1.304181} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.167122] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1259.167716] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.167886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.168259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1259.168508] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0f6335c-b718-4434-851f-e5ffe55be92b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.172697] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1259.172697] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5221d0b7-3947-ab37-1a02-b99b6c1c2bb8" [ 1259.172697] env[68638]: _type = "Task" [ 1259.172697] env[68638]: } to complete. 
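The Acquiring lock / Acquired lock / Acquired external semaphore lines around "[datastore2] devstack-image-cache_base/ef1ae417-…" show nova serializing access to one image-cache entry through oslo.concurrency before running SearchDatastore_Task. A condensed sketch of that pattern with an illustrative body (the real logic lives in vmops._fetch_image_if_missing):

    from oslo_concurrency import lockutils

    cache_key = ('[datastore2] devstack-image-cache_base/'
                 'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9')

    # Only one worker at a time may check or populate this cached image.
    with lockutils.lock(cache_key, external=True):
        # ... SearchDatastore_Task to see whether the cached VMDK exists,
        # and fetch it from glance if it does not ...
        pass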
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.181139] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221d0b7-3947-ab37-1a02-b99b6c1c2bb8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.494763] env[68638]: DEBUG nova.compute.utils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1259.496248] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1259.496436] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1259.533631] env[68638]: DEBUG nova.policy [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1259.683373] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5221d0b7-3947-ab37-1a02-b99b6c1c2bb8, 'name': SearchDatastore_Task, 'duration_secs': 0.009722} completed successfully. 
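The nova.policy line records a member/reader token failing the network:attach_external_network check; nova treats that as "do not request an external network" rather than as an error. A hedged illustration of how such a check evaluates with oslo.policy (the registered default and empty target below are made up for the example and are not nova's actual policy wiring):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    # Illustrative default: only admins may attach to external networks.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '03a342a1ef674059b9ab1a5dc050a82d'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # False for a member/reader token, matching the DEBUG line above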
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.683750] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.683897] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1259.684153] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.684300] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.684544] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1259.684892] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0d71aff-9e5e-4c73-a478-b19d209cd7a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.693367] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1259.693552] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Folder [datastore2] devstack-image-cache_base created. 
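Paths like "[datastore2] devstack-image-cache_base/ef1ae417-…/ef1ae417-….vmdk" follow vSphere's "[datastore] relative/path" convention, which oslo.vmware models with a DatastorePath object. A tiny stand-in to make the format explicit (build_ds_path is our name, not a nova or oslo API):

    def build_ds_path(datastore, *parts):
        """Format a vSphere datastore path: '[ds] a/b/c'."""
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = 'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9'
    cached_vmdk = build_ds_path('datastore2', 'devstack-image-cache_base',
                                image_id, image_id + '.vmdk')
    # '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk'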
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1259.694260] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0a5de56-5002-45d8-8e1f-3f0d5d6c23b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.699200] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1259.699200] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52205492-e793-8e91-998b-94a286577640" [ 1259.699200] env[68638]: _type = "Task" [ 1259.699200] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.706360] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52205492-e793-8e91-998b-94a286577640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.791332] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Successfully created port: 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1260.000042] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1260.209121] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52205492-e793-8e91-998b-94a286577640, 'name': SearchDatastore_Task, 'duration_secs': 0.00876} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.209849] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26ad392e-2382-4d7d-a09c-33130f436337 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.214510] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1260.214510] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]520b6292-b566-be19-dc0c-7620dc0fab5e" [ 1260.214510] env[68638]: _type = "Task" [ 1260.214510] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.221924] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520b6292-b566-be19-dc0c-7620dc0fab5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.725701] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]520b6292-b566-be19-dc0c-7620dc0fab5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009281} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.726097] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.726183] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1260.726446] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0da3980f-bce9-492e-ac98-86476dc85496 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.733075] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1260.733075] env[68638]: value = "task-2834627" [ 1260.733075] env[68638]: _type = "Task" [ 1260.733075] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.740567] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.010234] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Start spawning the instance on the hypervisor. 
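Taken together, the SearchDatastore_Task calls and the CopyVirtualDisk_Task above are the cache-hit path of root-disk preparation: confirm the cached VMDK exists, then copy it into the instance's own directory. A simplified sketch of that control flow, with the vCenter task calls injected as callables (the helper and its parameters are placeholders, not nova's API):

    def ensure_root_disk(image_id, instance_uuid,
                         file_exists, fetch_image, copy_disk):
        """Cache-aware root-disk preparation mirroring the tasks in the log.

        file_exists, fetch_image and copy_disk stand in for SearchDatastore_Task,
        the glance download path and CopyVirtualDisk_Task respectively.
        """
        cache_vmdk = ('[datastore2] devstack-image-cache_base/%s/%s.vmdk'
                      % (image_id, image_id))
        inst_vmdk = '[datastore2] %s/%s.vmdk' % (instance_uuid, instance_uuid)

        if not file_exists(cache_vmdk):       # cache miss: download once, reuse later
            fetch_image(image_id, cache_vmdk)
        copy_disk(cache_vmdk, inst_vmdk)      # cache hit in this log: straight copy
        return inst_vmdk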
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1261.033539] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.033835] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.034060] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.034271] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.034738] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.034738] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.034904] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.034987] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.035220] 
env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.035403] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.035612] env[68638]: DEBUG nova.virt.hardware [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.036719] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e784a108-bab0-429c-b161-1d69fdf97f99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.045633] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6378e811-e605-42e1-8b38-bd8c82bbee8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.155079] env[68638]: DEBUG nova.compute.manager [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-vif-plugged-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1261.155322] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.155531] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.155717] env[68638]: DEBUG oslo_concurrency.lockutils [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.155885] env[68638]: DEBUG nova.compute.manager [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] No waiting events found dispatching network-vif-plugged-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1261.156095] env[68638]: WARNING 
nova.compute.manager [req-5fac7881-f4d5-4a5a-ab8a-600e09d5b468 req-7df42992-2b49-46fa-adac-21a8029140f6 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received unexpected event network-vif-plugged-2253d838-fc66-4ff8-b86d-5d516a933a07 for instance with vm_state building and task_state spawning. [ 1261.243607] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432027} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.243902] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1261.244141] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1261.244460] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fcdcfd97-02f9-4938-bdba-70e95681d213 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.251702] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1261.251702] env[68638]: value = "task-2834628" [ 1261.251702] env[68638]: _type = "Task" [ 1261.251702] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.258848] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834628, 'name': ExtendVirtualDisk_Task} progress is 0%. 
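The "Extending root virtual disk to 1048576" entry is the m1.nano flavor's root_gb expressed in KiB, the unit the virtual-disk extend call works in: 1 GiB x 1024 x 1024 = 1048576. Spelled out:

    root_gb = 1                            # m1.nano root disk from the flavor
    new_capacity_kb = root_gb * 1024 * 1024
    assert new_capacity_kb == 1048576      # matches the extend request in the log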
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.269467] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Successfully updated port: 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1261.698046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.698046] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.698259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.698381] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.698512] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.700739] env[68638]: INFO nova.compute.manager [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Terminating instance [ 1261.762215] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834628, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067312} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.762675] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1261.763330] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abe1a1a-db4b-4b71-8ba7-05afac14fb0e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.777031] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.777157] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.777259] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.786788] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1261.787728] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cf03af5-8585-4b0a-b8f3-1602d5153dcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.807566] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1261.807566] env[68638]: value = "task-2834629" [ 1261.807566] env[68638]: _type = "Task" [ 1261.807566] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.816638] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834629, 'name': ReconfigVM_Task} progress is 6%. 
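Attaching the copied VMDK is done through a ReconfigVM_Task whose config spec adds a VirtualDisk device ("Reconfiguring VM instance instance-00000077 to attach disk …"). A pyVmomi-flavoured sketch of such a spec; nova actually builds the equivalent structure through oslo.vmware's suds client, and the controller key, unit number and flat backing here are simplifications (the log's image uses a sparse backing):

    from pyVmomi import vim

    def attach_disk_spec(vmdk_path, controller_key=1000, unit_number=0):
        backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
            fileName=vmdk_path, diskMode='persistent')
        disk = vim.vm.device.VirtualDisk(
            backing=backing, controllerKey=controller_key,
            unitNumber=unit_number, key=-100)
        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.add, device=disk)
        return vim.vm.ConfigSpec(deviceChange=[change])

    # vm_ref.ReconfigVM_Task(spec=attach_disk_spec(
    #     '[datastore2] 2452dd7a-5f16-4094-9407-59405eed572b/'
    #     '2452dd7a-5f16-4094-9407-59405eed572b.vmdk'))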
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.205110] env[68638]: DEBUG nova.compute.manager [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1262.205388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1262.206346] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4935e4-18f4-4b60-ae5d-cb7d49e3a74d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.213967] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1262.214208] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73524d2d-70c9-41ef-bdff-0c6b03dd6b39 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.220262] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1262.220262] env[68638]: value = "task-2834630" [ 1262.220262] env[68638]: _type = "Task" [ 1262.220262] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.227886] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834630, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.317944] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834629, 'name': ReconfigVM_Task, 'duration_secs': 0.275629} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.318754] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Instance cache missing network info. 
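For the instance being deleted (25c35c36-71c9-48cd-b7e4-6293eef890e5) the driver follows the usual vmwareapi teardown order seen over the next entries: power the VM off, unregister it, then delete its datastore directory. A compact sketch with the three vCenter operations injected as callables (the function and its arguments are illustrative only):

    def destroy_vm(power_off, unregister, delete_files,
                   instance_uuid, datastore='datastore1'):
        """Teardown order used in the log: power off, unregister, remove files."""
        power_off()                                           # PowerOffVM_Task
        unregister()                                          # VirtualMachine.UnregisterVM
        delete_files('[%s] %s' % (datastore, instance_uuid))  # DeleteDatastoreFile_Task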
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1262.320684] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 2452dd7a-5f16-4094-9407-59405eed572b/2452dd7a-5f16-4094-9407-59405eed572b.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1262.321807] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-193845dd-f67e-4410-848d-d81bb11da8d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.328357] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1262.328357] env[68638]: value = "task-2834631" [ 1262.328357] env[68638]: _type = "Task" [ 1262.328357] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.336446] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834631, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.457803] env[68638]: DEBUG nova.network.neutron [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.730296] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834630, 'name': PowerOffVM_Task, 'duration_secs': 
0.200443} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.730497] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.730707] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.730961] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87d32810-68fd-4fd6-9662-4ad8daa949b9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.790337] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.790619] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.790762] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleting the datastore file [datastore1] 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.791036] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98e21e4d-c575-4127-85be-041c2ef8ae3e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.796974] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for the task: (returnval){ [ 1262.796974] env[68638]: value = "task-2834633" [ 1262.796974] env[68638]: _type = "Task" [ 1262.796974] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.804585] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834633, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.836440] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834631, 'name': Rename_Task, 'duration_secs': 0.138573} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.836696] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1262.836939] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d7ab5bb-faab-4fa4-807f-e1307126828e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.842993] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1262.842993] env[68638]: value = "task-2834634" [ 1262.842993] env[68638]: _type = "Task" [ 1262.842993] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.851232] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834634, 'name': PowerOnVM_Task} progress is 0%. 
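With PowerOnVM_Task submitted, instance 2452dd7a-5f16-4094-9407-59405eed572b has passed through the full vmwareapi spawn pipeline traced in this log. The task sequence, in the order it appears above (the comments are our summary of each step):

    SPAWN_STEPS = [
        'CreateVM_Task',           # register the shell VM with the VIF/flavor config
        'SearchDatastore_Task',    # locate the cached image VMDK
        'CopyVirtualDisk_Task',    # copy the cached VMDK into the instance directory
        'ExtendVirtualDisk_Task',  # grow the root disk to the flavor's root_gb
        'ReconfigVM_Task',         # attach the copied disk to the VM
        'Rename_Task',             # rename the VM before power-on
        'PowerOnVM_Task',          # boot the instance
    ]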
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.961172] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.961517] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Instance network_info: |[{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1262.961981] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:e8:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2180b40f-2bb0-47da-ba80-c2fbe7f98af0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2253d838-fc66-4ff8-b86d-5d516a933a07', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1262.969595] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
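The network_info blob cached for 230f1a80-cf88-41c1-984f-d687932461d7 carries everything the VIF plug and guest networking need. Reading the useful fields back out of that structure (the dict below is a trimmed copy of the JSON in the log):

    vif = {
        "id": "2253d838-fc66-4ff8-b86d-5d516a933a07",
        "address": "fa:16:3e:06:e8:21",
        "network": {
            "bridge": "br-int",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "gateway": {"address": "192.168.128.1"},
                         "ips": [{"address": "192.168.128.6"}]}],
            "meta": {"mtu": 8950},
        },
        "details": {"nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0",
                    "segmentation_id": 970},
    }

    subnet = vif["network"]["subnets"][0]
    print(vif["address"], subnet["ips"][0]["address"], subnet["cidr"],
          vif["network"]["meta"]["mtu"])
    # fa:16:3e:06:e8:21 192.168.128.6 192.168.128.0/28 8950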
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.969726] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1262.969939] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-feafbb01-c74e-4d14-acad-36477242f1a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.988683] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1262.988683] env[68638]: value = "task-2834635" [ 1262.988683] env[68638]: _type = "Task" [ 1262.988683] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.996404] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834635, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.180257] env[68638]: DEBUG nova.compute.manager [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1263.180474] env[68638]: DEBUG nova.compute.manager [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing instance network info cache due to event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1263.180641] env[68638]: DEBUG oslo_concurrency.lockutils [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.180789] env[68638]: DEBUG oslo_concurrency.lockutils [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.180951] env[68638]: DEBUG nova.network.neutron [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.309776] env[68638]: DEBUG oslo_vmware.api [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Task: {'id': task-2834633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130146} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.310158] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.310428] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1263.310677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1263.310917] env[68638]: INFO nova.compute.manager [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1263.311281] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.311561] env[68638]: DEBUG nova.compute.manager [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1263.311691] env[68638]: DEBUG nova.network.neutron [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.353019] env[68638]: DEBUG oslo_vmware.api [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834634, 'name': PowerOnVM_Task, 'duration_secs': 0.464504} completed successfully. 
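Network teardown for the deleted instance runs through oslo.service's looping-call machinery, the same "Waiting for function … to return" wrapper used earlier for create_vm. A generic sketch of that pattern (the interval and the worker body are illustrative; nova layers its own retry logic around the neutron deallocation call):

    from oslo_service import loopingcall

    def _deallocate():
        # ... ask neutron to unbind/delete the instance's ports ...
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate)
    result = timer.start(interval=1.0).wait()   # blocks until LoopingCallDone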
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.353332] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1263.353616] env[68638]: DEBUG nova.compute.manager [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1263.354409] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a6c5cd-a3a6-434e-a88e-a044324e4eca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.499014] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834635, 'name': CreateVM_Task, 'duration_secs': 0.43445} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.499606] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1263.500346] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.500538] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.500863] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1263.501153] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ca8f389-be3d-4e5c-a7f4-94b7cd643fb4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.506377] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1263.506377] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c81ff0-cb1d-1045-9db4-9d851289f0f2" [ 1263.506377] env[68638]: _type = "Task" [ 1263.506377] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.515413] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c81ff0-cb1d-1045-9db4-9d851289f0f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.874338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.874338] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.874691] env[68638]: DEBUG nova.objects.instance [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68638) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1263.926266] env[68638]: DEBUG nova.network.neutron [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updated VIF entry in instance network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1263.926754] env[68638]: DEBUG nova.network.neutron [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.018893] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c81ff0-cb1d-1045-9db4-9d851289f0f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010945} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.020798] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.021128] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.021598] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.021818] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.022086] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.022919] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c91914d-b540-49c4-9ab8-185f11265bb6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.031826] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.032390] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.033401] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab63e1d5-20f9-4a2e-8cb3-e93ee6181a40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.039412] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1264.039412] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52cec650-ea80-8ba7-148a-710ab3b3cd56" [ 1264.039412] env[68638]: _type = "Task" [ 1264.039412] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.047268] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cec650-ea80-8ba7-148a-710ab3b3cd56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.430082] env[68638]: DEBUG oslo_concurrency.lockutils [req-cac5208c-23ce-419a-8f4f-3c96a355cc1e req-29148842-06fc-4362-bf3e-44683f16535b service nova] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.523534] env[68638]: DEBUG nova.network.neutron [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.550318] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52cec650-ea80-8ba7-148a-710ab3b3cd56, 'name': SearchDatastore_Task, 'duration_secs': 0.008693} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.551151] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-014e1003-f1b6-46ba-ba6b-2fad6cde7910 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.556352] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1264.556352] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5261e163-f88e-7953-8524-5b76824ef754" [ 1264.556352] env[68638]: _type = "Task" [ 1264.556352] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.563812] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5261e163-f88e-7953-8524-5b76824ef754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.884226] env[68638]: DEBUG oslo_concurrency.lockutils [None req-3ea77471-b15f-4828-9007-9ab559184918 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.026789] env[68638]: INFO nova.compute.manager [-] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Took 1.71 seconds to deallocate network for instance. [ 1265.067740] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5261e163-f88e-7953-8524-5b76824ef754, 'name': SearchDatastore_Task, 'duration_secs': 0.010075} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.068040] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.068339] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 230f1a80-cf88-41c1-984f-d687932461d7/230f1a80-cf88-41c1-984f-d687932461d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.068627] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efe449b2-fc0b-4224-8bbb-a88d0f648a6a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.075504] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1265.075504] env[68638]: value = "task-2834636" [ 1265.075504] env[68638]: _type = "Task" [ 1265.075504] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.084314] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834636, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.210206] env[68638]: DEBUG nova.compute.manager [req-1dd45605-9d1b-4ecf-9bfe-d3db54e345ae req-c766d392-4512-45c6-bcf7-92fb6621ec1f service nova] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Received event network-vif-deleted-3199e8a3-335c-43ff-be19-3881b85a0203 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1265.534834] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.535213] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.535476] env[68638]: DEBUG nova.objects.instance [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lazy-loading 'resources' on Instance uuid 25c35c36-71c9-48cd-b7e4-6293eef890e5 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.587132] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834636, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.086204] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834636, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57498} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.088945] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 230f1a80-cf88-41c1-984f-d687932461d7/230f1a80-cf88-41c1-984f-d687932461d7.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1266.089235] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1266.089762] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5aac7f6-ec63-4085-9c5f-8f37bb233720 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.096852] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1266.096852] env[68638]: value = "task-2834637" [ 1266.096852] env[68638]: _type = "Task" [ 1266.096852] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.108726] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834637, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.127565] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1847d3b8-8acd-47e5-8736-4b9bded74f8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.134930] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e97811-da1e-4441-8341-961fc4853f1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.167972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4ba4f5-cc15-4d01-b693-ae11f6871d06 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.175568] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d6fef6-80c8-4496-830f-da4a502a03d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.189497] env[68638]: DEBUG nova.compute.provider_tree [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.606918] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834637, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185728} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.607212] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1266.607966] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c751b81-9c29-4342-b354-a85973b2cbdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.629706] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 230f1a80-cf88-41c1-984f-d687932461d7/230f1a80-cf88-41c1-984f-d687932461d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.629950] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6d16b6b-d958-419b-bd48-ccf26605a66c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.649341] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1266.649341] env[68638]: value = "task-2834638" [ 1266.649341] env[68638]: _type = "Task" [ 1266.649341] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.657340] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.692524] env[68638]: DEBUG nova.scheduler.client.report [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1267.159402] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834638, 'name': ReconfigVM_Task, 'duration_secs': 0.340015} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.159983] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 230f1a80-cf88-41c1-984f-d687932461d7/230f1a80-cf88-41c1-984f-d687932461d7.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1267.160296] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5e46a86-8ef6-418d-a771-6ade13c43f9d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.165874] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1267.165874] env[68638]: value = "task-2834639" [ 1267.165874] env[68638]: _type = "Task" [ 1267.165874] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.172845] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834639, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.197812] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.663s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.218318] env[68638]: INFO nova.scheduler.client.report [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Deleted allocations for instance 25c35c36-71c9-48cd-b7e4-6293eef890e5 [ 1267.675519] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834639, 'name': Rename_Task, 'duration_secs': 0.145658} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.675794] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1267.676044] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92f333c2-3abb-41e6-8b58-9d48bc640e86 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.682555] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1267.682555] env[68638]: value = "task-2834640" [ 1267.682555] env[68638]: _type = "Task" [ 1267.682555] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.690580] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.726758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-fe62a3ab-a16a-4ce5-8d4b-cce4f76d0316 tempest-AttachVolumeShelveTestJSON-1826028629 tempest-AttachVolumeShelveTestJSON-1826028629-project-member] Lock "25c35c36-71c9-48cd-b7e4-6293eef890e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.029s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.195810] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834640, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.693313] env[68638]: DEBUG oslo_vmware.api [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834640, 'name': PowerOnVM_Task, 'duration_secs': 0.880969} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.693614] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1268.693775] env[68638]: INFO nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Took 7.68 seconds to spawn the instance on the hypervisor. 
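The sequence above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follows the same trace pattern: the driver submits a vCenter task, then the "wait_for_task" / "_poll_task" lines record repeated progress polls ("progress is 0%", "progress is 77%") until the task logs "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained sketch of that polling loop only; it is not the oslo.vmware implementation, and TaskInfo, fetch_task_info, poll interval and timeout are hypothetical stand-ins used purely for illustration.

# Minimal sketch of the task-polling pattern reflected in the log entries above.
# All names (TaskInfo, fetch_task_info, poll_task_until_done) are hypothetical
# illustrations, not the oslo.vmware API.
import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                  # "running", "success", or "error"
    progress: int = 0           # percent complete, as logged ("progress is 77%")
    error: Optional[str] = None

def poll_task_until_done(fetch_task_info: Callable[[], TaskInfo],
                         poll_interval: float = 0.5,
                         timeout: float = 300.0) -> TaskInfo:
    """Poll a task until it finishes, mirroring the DEBUG lines
    'progress is N%' followed by 'completed successfully'."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)

In the log, the duration_secs recorded on each completed task (for example 'duration_secs': 0.880969 for PowerOnVM_Task) corresponds to the elapsed time between task submission and the final successful poll.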
[ 1268.693952] env[68638]: DEBUG nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1268.694711] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd3ca0f-e2f7-4c02-be94-c3d5982016a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.211773] env[68638]: INFO nova.compute.manager [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Took 12.39 seconds to build instance. [ 1269.714149] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a643d902-c471-46fe-9f5d-ced89f4a0e98 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.896s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.146107] env[68638]: DEBUG nova.compute.manager [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1270.146345] env[68638]: DEBUG nova.compute.manager [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1270.146600] env[68638]: DEBUG oslo_concurrency.lockutils [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.146783] env[68638]: DEBUG oslo_concurrency.lockutils [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.147121] env[68638]: DEBUG nova.network.neutron [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1270.843416] env[68638]: DEBUG nova.network.neutron [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1270.843810] env[68638]: DEBUG nova.network.neutron [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.347287] env[68638]: DEBUG oslo_concurrency.lockutils [req-205b68de-9d41-4025-8d54-4c760793a956 req-0cdbb377-e0a3-4c39-b721-b7c090737280 service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.393079] env[68638]: DEBUG nova.compute.manager [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1271.393537] env[68638]: DEBUG nova.compute.manager [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing instance network info cache due to event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1271.393836] env[68638]: DEBUG oslo_concurrency.lockutils [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.394131] env[68638]: DEBUG oslo_concurrency.lockutils [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.394410] env[68638]: DEBUG nova.network.neutron [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1272.070931] env[68638]: DEBUG nova.network.neutron [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updated VIF entry in instance network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1272.071299] env[68638]: DEBUG nova.network.neutron [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.175550] env[68638]: DEBUG nova.compute.manager [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1272.175770] env[68638]: DEBUG nova.compute.manager [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] [instance: 
230f1a80-cf88-41c1-984f-d687932461d7] Refreshing instance network info cache due to event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1272.176367] env[68638]: DEBUG oslo_concurrency.lockutils [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.574750] env[68638]: DEBUG oslo_concurrency.lockutils [req-00375e4d-c556-422d-9e45-959650b9b5a7 req-cf98cdf7-eaef-467b-91c9-9575e8a48bc2 service nova] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.575207] env[68638]: DEBUG oslo_concurrency.lockutils [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.575403] env[68638]: DEBUG nova.network.neutron [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.265866] env[68638]: DEBUG nova.network.neutron [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updated VIF entry in instance network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1273.266272] env[68638]: DEBUG nova.network.neutron [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1273.430289] env[68638]: DEBUG nova.compute.manager [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1273.430496] env[68638]: DEBUG nova.compute.manager [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1273.430714] env[68638]: DEBUG oslo_concurrency.lockutils [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.430858] env[68638]: DEBUG oslo_concurrency.lockutils [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.431030] env[68638]: DEBUG nova.network.neutron [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1273.769732] env[68638]: DEBUG oslo_concurrency.lockutils [req-5993ee50-e9dc-48b1-8931-071ce662b183 req-7db55cfc-3c0c-4559-9626-f5a088382725 service nova] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.126919] env[68638]: DEBUG nova.network.neutron [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1274.127328] env[68638]: DEBUG nova.network.neutron [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.630425] env[68638]: DEBUG oslo_concurrency.lockutils [req-a94ae62b-66d9-4b5f-a844-7921974b9d54 
req-18d81d58-fe02-431a-b4a6-2bd5f67f4d35 service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.674671] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.675120] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.675288] env[68638]: DEBUG nova.objects.instance [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'flavor' on Instance uuid e6bb1034-e440-4fb2-ba56-a734c4f67cdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.258410] env[68638]: DEBUG nova.objects.instance [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'pci_requests' on Instance uuid e6bb1034-e440-4fb2-ba56-a734c4f67cdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.760947] env[68638]: DEBUG nova.objects.base [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1286.761385] env[68638]: DEBUG nova.network.neutron [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1286.819076] env[68638]: DEBUG nova.policy [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1288.167565] env[68638]: DEBUG nova.compute.manager [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] 
[instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1288.167849] env[68638]: DEBUG oslo_concurrency.lockutils [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.167988] env[68638]: DEBUG oslo_concurrency.lockutils [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.168203] env[68638]: DEBUG oslo_concurrency.lockutils [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.168374] env[68638]: DEBUG nova.compute.manager [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] No waiting events found dispatching network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1288.168567] env[68638]: WARNING nova.compute.manager [req-4af0692f-ea94-449e-a342-7a8133d8f92d req-a8c387b2-2306-43db-9109-a9a05f693dc7 service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received unexpected event network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 for instance with vm_state active and task_state None. 
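The network-vif-plugged handling above shows the external-event bookkeeping pattern: the per-instance "...-events" lock is acquired, the registry of waiting events is checked via pop_instance_event, and because nothing is waiting on this event ("No waiting events found dispatching ...") the manager records it as unexpected. Below is a minimal sketch of that pop-or-warn pattern, assuming a simple dict-plus-lock registry; the class and method names are illustrative only and do not reproduce Nova's actual compute.manager implementation.

# Simplified sketch of the "pop waiting event or warn" pattern reflected in the
# lockutils / pop_instance_event lines above. InstanceEventRegistry and its
# methods are hypothetical.
import logging
import threading
from typing import Dict

LOG = logging.getLogger(__name__)

class InstanceEventRegistry:
    def __init__(self) -> None:
        self._lock = threading.Lock()
        # Maps instance UUID -> {event name -> threading.Event waiter}.
        self._waiters: Dict[str, Dict[str, threading.Event]] = {}

    def prepare_for_event(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an external event before triggering it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
        return waiter

    def dispatch(self, instance_uuid: str, event_name: str) -> None:
        """Wake a waiter if one exists, otherwise record the event as unexpected."""
        with self._lock:  # analogue of the "<uuid>-events" lock in the log
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return
        waiter.set()

A caller attaching an interface would normally register the waiter before calling out to Neutron and then block on it; in the trace above no waiter was registered for network-vif-plugged, hence the WARNING, and the attach proceeds regardless as the following entries show.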
[ 1288.249041] env[68638]: DEBUG nova.network.neutron [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Successfully updated port: 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1288.752148] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1288.752360] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1288.752884] env[68638]: DEBUG nova.network.neutron [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1289.288749] env[68638]: WARNING nova.network.neutron [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] cd5da8a3-b68c-498e-8922-d556cd2178c4 already exists in list: networks containing: ['cd5da8a3-b68c-498e-8922-d556cd2178c4']. 
ignoring it [ 1289.531885] env[68638]: DEBUG nova.network.neutron [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "address": "fa:16:3e:21:c6:77", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9a2f8e-a2", "ovs_interfaceid": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.035131] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1290.035785] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.035948] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.036864] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6661d0-759a-4a7a-b242-2630fa6c7a44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.053586] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1290.053860] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1290.054057] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1290.054256] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1290.054409] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1290.054561] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1290.054770] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1290.054934] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1290.055126] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1290.055317] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1290.055499] env[68638]: DEBUG nova.virt.hardware [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1290.061845] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfiguring VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1290.062465] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e55738e-3222-494c-a983-b5436f63e4fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.080255] env[68638]: DEBUG oslo_vmware.api [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1290.080255] env[68638]: value = "task-2834642" [ 1290.080255] env[68638]: _type = "Task" [ 1290.080255] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.087803] env[68638]: DEBUG oslo_vmware.api [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834642, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.193972] env[68638]: DEBUG nova.compute.manager [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1290.194157] env[68638]: DEBUG nova.compute.manager [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1290.194382] env[68638]: DEBUG oslo_concurrency.lockutils [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.194524] env[68638]: DEBUG oslo_concurrency.lockutils [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.194688] env[68638]: DEBUG nova.network.neutron [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1290.590027] env[68638]: DEBUG oslo_vmware.api [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834642, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.880697] env[68638]: DEBUG nova.network.neutron [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1290.881198] env[68638]: DEBUG nova.network.neutron [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "address": "fa:16:3e:21:c6:77", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9a2f8e-a2", "ovs_interfaceid": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.090677] env[68638]: DEBUG oslo_vmware.api [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834642, 'name': ReconfigVM_Task, 'duration_secs': 0.541736} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.091179] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.091402] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfigured VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1291.384175] env[68638]: DEBUG oslo_concurrency.lockutils [req-aca2b93d-6df0-4ae6-b29b-034a7f8b09c2 req-891e3467-761f-4141-808f-c64cc031a3fd service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1291.596246] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5e14b888-da40-45ce-87fc-95c883c410c9 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.921s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.865136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.865482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.368755] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.369061] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.369995] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3ba5a3c8-819a-415f-a436-6303c90092ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.387873] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c392664f-5160-4550-8a5b-88feaf78d8bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.413351] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfiguring VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1293.413351] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c9219b6-051f-4c37-a340-5987e2ea51e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.431391] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1293.431391] env[68638]: value = "task-2834643" [ 1293.431391] env[68638]: _type = "Task" [ 1293.431391] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.439241] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.941400] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.441910] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.942019] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.443017] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.943461] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.340809] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.445027] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.947296] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.445525] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.946511] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.447294] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.948099] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.448937] env[68638]: DEBUG oslo_vmware.api [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834643, 'name': ReconfigVM_Task, 'duration_secs': 5.739721} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.450038] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.450038] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Reconfigured VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1300.340225] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.340426] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.522018] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "2452dd7a-5f16-4094-9407-59405eed572b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.522428] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.522491] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "2452dd7a-5f16-4094-9407-59405eed572b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.522677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.522850] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.526370] env[68638]: INFO nova.compute.manager [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Terminating instance [ 1300.720969] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.721181] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.721362] env[68638]: DEBUG nova.network.neutron [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1301.030314] env[68638]: DEBUG nova.compute.manager [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1301.030571] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1301.031502] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d5beab-f74d-488c-8b57-8bf61c69fdf1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.039362] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1301.039592] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54783693-3163-4998-812a-ec4c0da34c73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.045749] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1301.045749] env[68638]: value = "task-2834644" [ 1301.045749] env[68638]: _type = "Task" [ 1301.045749] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.053021] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834644, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.339673] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.339959] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.340127] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1301.472082] env[68638]: DEBUG nova.compute.manager [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1301.472295] env[68638]: DEBUG nova.compute.manager [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing instance network info cache due to event network-changed-d0d7d883-914c-4c61-b0d1-3a31b2df0943. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1301.472489] env[68638]: DEBUG oslo_concurrency.lockutils [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] Acquiring lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.514225] env[68638]: INFO nova.network.neutron [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1301.514570] env[68638]: DEBUG nova.network.neutron [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.557802] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834644, 'name': PowerOffVM_Task, 'duration_secs': 0.173033} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.558393] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1301.558393] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1301.558534] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dbe1d8f-0df5-40ee-8c7f-9ecd10981196 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.622146] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1301.622386] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1301.622564] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleting the datastore file [datastore2] 2452dd7a-5f16-4094-9407-59405eed572b {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1301.622824] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0b3855c-12e2-49eb-8597-a41773e6d03f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.629761] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1301.629761] env[68638]: value = "task-2834646" [ 1301.629761] env[68638]: _type = "Task" [ 1301.629761] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.637548] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834646, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.017726] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.019991] env[68638]: DEBUG oslo_concurrency.lockutils [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] Acquired lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1302.020211] env[68638]: DEBUG nova.network.neutron [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Refreshing network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1302.139013] env[68638]: DEBUG oslo_vmware.api [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137261} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.139295] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1302.139485] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1302.139686] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1302.139867] env[68638]: INFO nova.compute.manager [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1302.140120] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1302.140314] env[68638]: DEBUG nova.compute.manager [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1302.140416] env[68638]: DEBUG nova.network.neutron [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1302.156610] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.157038] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.157181] env[68638]: DEBUG nova.objects.instance [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'flavor' on Instance uuid 230f1a80-cf88-41c1-984f-d687932461d7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.336385] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.523985] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cebe9a7-8086-4ae4-81b9-e3c7975a01be tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-e6bb1034-e440-4fb2-ba56-a734c4f67cdb-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.657s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.725543] env[68638]: DEBUG nova.network.neutron [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updated VIF entry in instance network info cache for port d0d7d883-914c-4c61-b0d1-3a31b2df0943. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1302.725902] env[68638]: DEBUG nova.network.neutron [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [{"id": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "address": "fa:16:3e:30:cd:8c", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d7d883-91", "ovs_interfaceid": "d0d7d883-914c-4c61-b0d1-3a31b2df0943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.747234] env[68638]: DEBUG nova.objects.instance [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'pci_requests' on Instance uuid 230f1a80-cf88-41c1-984f-d687932461d7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.058112] env[68638]: DEBUG nova.network.neutron [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.229092] env[68638]: DEBUG oslo_concurrency.lockutils [req-93450cdf-4d70-42a2-9de3-884d0d3e1049 req-f1ed0514-f945-4d39-aaff-ded7a9837d2f service nova] Releasing lock "refresh_cache-e6bb1034-e440-4fb2-ba56-a734c4f67cdb" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1303.250063] env[68638]: DEBUG nova.objects.base [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Object Instance<230f1a80-cf88-41c1-984f-d687932461d7> lazy-loaded attributes: flavor,pci_requests {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1303.250303] env[68638]: DEBUG nova.network.neutron [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1303.308940] env[68638]: DEBUG nova.policy [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '87dbe1b58a124d8ba72432b58a711496', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03a342a1ef674059b9ab1a5dc050a82d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1303.339985] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1303.500779] env[68638]: DEBUG nova.compute.manager [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1303.500984] env[68638]: DEBUG nova.compute.manager [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing instance network info cache due to event network-changed-2253d838-fc66-4ff8-b86d-5d516a933a07. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1303.501291] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.501443] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.501608] env[68638]: DEBUG nova.network.neutron [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1303.560490] env[68638]: INFO nova.compute.manager [-] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Took 1.42 seconds to deallocate network for instance. 
[ 1303.844379] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.844379] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.844379] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.844379] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1303.845097] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4985da7b-a700-4d04-986a-5c085af6d122 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.853489] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a4f1ff-883f-40d2-9010-c9a02f717912 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.867391] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b95a59-4ccd-4567-b720-ac730f44e084 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.873522] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c0bfbd-d493-49a6-81fd-7a550991c8ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.901584] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180442MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1303.901750] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.901933] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.066026] env[68638]: DEBUG 
oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.193120] env[68638]: DEBUG nova.network.neutron [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updated VIF entry in instance network info cache for port 2253d838-fc66-4ff8-b86d-5d516a933a07. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1304.193504] env[68638]: DEBUG nova.network.neutron [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.696574] env[68638]: DEBUG oslo_concurrency.lockutils [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.696830] env[68638]: DEBUG nova.compute.manager [req-fb96be22-65a4-4963-b80f-0faa8b34253d req-57486539-8e57-4d87-ab63-aa71cca1cc10 service nova] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Received event network-vif-deleted-7c25dd2c-4a3e-4c76-9747-5f4fc5f8d267 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1304.745230] env[68638]: DEBUG nova.network.neutron [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Successfully updated port: 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.927897] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None 
None] Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1304.928259] env[68638]: WARNING nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 2452dd7a-5f16-4094-9407-59405eed572b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1304.928259] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance e6bb1034-e440-4fb2-ba56-a734c4f67cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1304.928381] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 230f1a80-cf88-41c1-984f-d687932461d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1304.928506] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1304.928689] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1304.983419] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4842648d-9470-422f-88ff-99796e5e4c8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.992272] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675c0b8f-b0b8-4bc9-90c3-099b76fe53f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.021739] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58247047-2c68-45de-96d3-5482589427c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.028593] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33daeb32-b1e4-42cf-bd3e-f4d2c8e352cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.041078] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1305.247517] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.247695] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.247837] env[68638]: DEBUG nova.network.neutron [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.543635] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1305.562923] env[68638]: DEBUG nova.compute.manager [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1305.563185] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.563308] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.563473] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.563646] env[68638]: DEBUG nova.compute.manager [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 
230f1a80-cf88-41c1-984f-d687932461d7] No waiting events found dispatching network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1305.563814] env[68638]: WARNING nova.compute.manager [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received unexpected event network-vif-plugged-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 for instance with vm_state active and task_state None. [ 1305.563971] env[68638]: DEBUG nova.compute.manager [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-changed-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1305.564139] env[68638]: DEBUG nova.compute.manager [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing instance network info cache due to event network-changed-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1305.564303] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.782406] env[68638]: WARNING nova.network.neutron [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] cd5da8a3-b68c-498e-8922-d556cd2178c4 already exists in list: networks containing: ['cd5da8a3-b68c-498e-8922-d556cd2178c4']. 
ignoring it [ 1306.028012] env[68638]: DEBUG nova.network.neutron [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "address": "fa:16:3e:21:c6:77", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9a2f8e-a2", "ovs_interfaceid": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.047844] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1306.048075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.146s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.048582] env[68638]: DEBUG 
oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.983s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.048838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.070029] env[68638]: INFO nova.scheduler.client.report [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted allocations for instance 2452dd7a-5f16-4094-9407-59405eed572b [ 1306.531410] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.532208] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.532384] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.532688] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.532877] env[68638]: DEBUG nova.network.neutron [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Refreshing network info cache for port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.534853] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889fb95b-a3e2-4654-a4a5-fef169cf55b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.552140] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1306.552370] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.552527] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1306.552712] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.552858] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1306.553010] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1306.553257] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1306.553423] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1306.553592] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1306.553756] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1306.553931] env[68638]: DEBUG nova.virt.hardware [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1306.560157] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfiguring VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1306.561116] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-464d3e46-6511-4226-8bd9-bd6bc641cc5e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.576914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-df2ecd03-1dbc-44c8-8c77-da375464b06e tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "2452dd7a-5f16-4094-9407-59405eed572b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.055s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.579016] env[68638]: DEBUG oslo_vmware.api [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1306.579016] env[68638]: value = "task-2834647" [ 1306.579016] env[68638]: _type = "Task" [ 1306.579016] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.589925] env[68638]: DEBUG oslo_vmware.api [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834647, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.051516] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.090894] env[68638]: DEBUG oslo_vmware.api [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834647, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.523902] env[68638]: DEBUG nova.network.neutron [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updated VIF entry in instance network info cache for port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.524409] env[68638]: DEBUG nova.network.neutron [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "address": "fa:16:3e:21:c6:77", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e9a2f8e-a2", "ovs_interfaceid": "1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.589939] env[68638]: DEBUG oslo_vmware.api [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834647, 'name': ReconfigVM_Task, 'duration_secs': 0.549996} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.590486] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.590716] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfigured VM to attach interface {{(pid=68638) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1308.027580] env[68638]: DEBUG oslo_concurrency.lockutils [req-7571a399-36ca-43ce-928a-c303c6ace69e req-7b6990ea-76a9-45f2-b3a9-fb85fda86a34 service nova] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.095764] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4e24c225-1628-4df0-a151-965372614b6f tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.939s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1308.482092] env[68638]: DEBUG nova.compute.manager [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1308.999330] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1308.999665] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.365922] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.366302] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 
tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.504986] env[68638]: INFO nova.compute.claims [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1309.869339] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.869508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.870465] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bbddc6-5257-43f7-aa0b-fdf2c647e134 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.887979] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885fb59c-3032-411c-871e-cee04c17a85e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.913274] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfiguring VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1309.913537] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb2956c9-7c5e-450e-a4ec-d62225cc0daa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.931387] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1309.931387] env[68638]: value = "task-2834648" [ 1309.931387] env[68638]: _type = "Task" [ 1309.931387] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.939062] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.011164] env[68638]: INFO nova.compute.resource_tracker [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating resource usage from migration 3f7095a0-9559-4d5f-b8e7-759289f39838 [ 1310.071406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819f3b22-5b01-44b8-b060-2c57cca5a5ef {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.078669] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c0fc36-06e2-4d23-b8ac-b7f232227e37 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.109311] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3fc37c-ac88-4d1d-a910-a07354c0c684 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.115882] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2088c6-5639-4da1-987b-8fa87ac830d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.128282] env[68638]: DEBUG nova.compute.provider_tree [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1310.441037] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.631930] env[68638]: DEBUG nova.scheduler.client.report [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1310.941458] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.136739] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.137s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1311.137026] env[68638]: INFO nova.compute.manager [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Migrating [ 1311.441886] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.651945] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.652207] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.652325] env[68638]: DEBUG nova.network.neutron [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.942777] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.342569] env[68638]: DEBUG nova.network.neutron [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.443713] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.845873] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.944652] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.444918] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.945019] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.360803] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648dd168-1190-48a1-a2ec-ec8b14e9246b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.379440] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1314.445267] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.884760] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.885061] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2cbe679-9f2b-46ca-ab0d-9c85bcd25dff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.892208] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1314.892208] env[68638]: value = "task-2834649" [ 1314.892208] env[68638]: _type = "Task" [ 1314.892208] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.900025] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834649, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.948954] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.401723] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834649, 'name': PowerOffVM_Task, 'duration_secs': 0.17215} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.401989] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1315.402135] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1315.447803] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.909019] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1315.909315] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1315.909432] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1315.909610] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1315.909755] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1315.909903] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 
tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1315.910137] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1315.910305] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1315.910474] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1315.910635] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1315.910869] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1315.915940] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f6d7948-ac53-4108-ad16-e1f5094409b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.931524] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1315.931524] env[68638]: value = "task-2834650" [ 1315.931524] env[68638]: _type = "Task" [ 1315.931524] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.939304] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834650, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.946303] env[68638]: DEBUG oslo_vmware.api [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834648, 'name': ReconfigVM_Task, 'duration_secs': 5.941435} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.946626] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1315.946932] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Reconfigured VM to detach interface {{(pid=68638) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1316.441463] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834650, 'name': ReconfigVM_Task, 'duration_secs': 0.176862} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.441780] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1316.947880] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1316.948424] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.948424] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1316.948613] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.948613] env[68638]: DEBUG nova.virt.hardware [None 
req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1316.948756] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1316.949059] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1316.949237] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1316.949413] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1316.949596] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1316.949786] env[68638]: DEBUG nova.virt.hardware [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1316.955092] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfiguring VM instance instance-00000066 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1316.955387] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f634905-d679-46df-9922-2193b7e4ebd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.974092] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1316.974092] env[68638]: value = "task-2834651" [ 1316.974092] env[68638]: _type = "Task" [ 1316.974092] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.981791] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834651, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.260791] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.261075] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquired lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1317.261260] env[68638]: DEBUG nova.network.neutron [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1317.483663] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834651, 'name': ReconfigVM_Task, 'duration_secs': 0.148827} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.483886] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfigured VM instance instance-00000066 to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1317.484667] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88177b4a-4650-483a-a5f6-62924b9fa385 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.507186] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.507421] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6333ad8d-40bf-47ba-b631-daa617922d41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.524367] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1317.524367] env[68638]: value = "task-2834652" [ 1317.524367] env[68638]: _type = "Task" [ 1317.524367] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.531908] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834652, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.731734] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.732129] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.732416] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "230f1a80-cf88-41c1-984f-d687932461d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.732683] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.732921] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.735573] env[68638]: INFO nova.compute.manager [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Terminating instance [ 1317.962298] env[68638]: INFO nova.network.neutron [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Port 1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
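[editor's note] The entries above record the driver's usual reconfigure pattern: build a VirtualMachineConfigSpec, invoke ReconfigVM_Task through the oslo.vmware session, then block in wait_for_task while _poll_task reports progress (task-2834651 detaching disk 2000, task-2834652 re-attaching the vmdk). Below is a minimal sketch of that call pattern only; it assumes an already-created VMwareAPISession and a resolved vm_ref, and the helper name reconfigure_vm_sketch is illustrative, not Nova's actual volumeops code.

    # Minimal sketch of the ReconfigVM_Task / wait_for_task pattern seen in the
    # surrounding log entries. Assumes an existing oslo.vmware session and a
    # resolved VM managed-object reference; names here are illustrative only.
    from oslo_vmware import api as vmware_api


    def reconfigure_vm_sketch(session, vm_ref, device, operation='remove'):
        """Add or remove one virtual device and wait for the vCenter task."""
        client_factory = session.vim.client.factory

        device_change = client_factory.create('ns0:VirtualDeviceConfigSpec')
        device_change.operation = operation      # 'remove' detaches, 'add' attaches
        device_change.device = device

        config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [device_change]

        # Returns a Task moref; wait_for_task polls it the same way _poll_task
        # does in the log (progress 0% .. 100%) and raises if the task fails.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        return session.wait_for_task(task)


    # Hypothetical usage (host, credentials and the device lookup are placeholders;
    # the disk device would come from the VM's config.hardware.device property):
    # session = vmware_api.VMwareAPISession('vc.example.net', 'user', 'secret',
    #                                       api_retry_count=10,
    #                                       task_poll_interval=0.5)
    # reconfigure_vm_sketch(session, vm_ref, disk_device, operation='remove')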
[ 1317.962663] env[68638]: DEBUG nova.network.neutron [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [{"id": "2253d838-fc66-4ff8-b86d-5d516a933a07", "address": "fa:16:3e:06:e8:21", "network": {"id": "cd5da8a3-b68c-498e-8922-d556cd2178c4", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-247450824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03a342a1ef674059b9ab1a5dc050a82d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2180b40f-2bb0-47da-ba80-c2fbe7f98af0", "external-id": "nsx-vlan-transportzone-970", "segmentation_id": 970, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2253d838-fc", "ovs_interfaceid": "2253d838-fc66-4ff8-b86d-5d516a933a07", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.033844] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834652, 'name': ReconfigVM_Task, 'duration_secs': 0.266635} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.034170] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9/58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.034474] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1318.239355] env[68638]: DEBUG nova.compute.manager [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1318.239631] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1318.241032] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d16597-7df2-4f4d-a0cc-b070e2939bea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.248496] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.248696] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bc92e68-c178-47c0-bf26-255e5390bb75 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.256175] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1318.256175] env[68638]: value = "task-2834653" [ 1318.256175] env[68638]: _type = "Task" [ 1318.256175] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.263579] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834653, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.465608] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Releasing lock "refresh_cache-230f1a80-cf88-41c1-984f-d687932461d7" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1318.541432] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dd7d35-e887-437d-be71-64037e925366 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.560291] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce3db75-1325-4e4f-9867-db89e9e5977a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.576959] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1318.765757] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834653, 'name': PowerOffVM_Task, 'duration_secs': 0.223242} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.766035] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1318.766222] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1318.766497] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32519f48-70ae-48a5-8756-9dc2d7531273 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.824377] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1318.824610] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1318.824796] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleting the datastore file [datastore2] 230f1a80-cf88-41c1-984f-d687932461d7 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1318.825063] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-761991f7-5eec-419f-8887-f2ff914261ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.830590] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1318.830590] env[68638]: value = "task-2834655" [ 1318.830590] env[68638]: _type = "Task" [ 1318.830590] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.839294] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834655, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.970095] env[68638]: DEBUG oslo_concurrency.lockutils [None req-b11aee81-f749-46c9-aea7-46b615738295 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "interface-230f1a80-cf88-41c1-984f-d687932461d7-1e9a2f8e-a220-4f2d-9a0b-b25692f9a9b6" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.604s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.113986] env[68638]: DEBUG nova.network.neutron [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Port ed59a3ef-d65c-48e6-9271-4552c024c365 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1319.340607] env[68638]: DEBUG oslo_vmware.api [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834655, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15088} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.340864] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1319.341047] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1319.341241] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1319.341418] env[68638]: INFO nova.compute.manager [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1319.341669] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1319.341896] env[68638]: DEBUG nova.compute.manager [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1319.342011] env[68638]: DEBUG nova.network.neutron [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1319.982112] env[68638]: DEBUG nova.compute.manager [req-9ebddbb9-423a-48c9-8ae4-459c2f09809c req-4784282a-48f1-4967-bc9b-393fa6676496 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Received event network-vif-deleted-2253d838-fc66-4ff8-b86d-5d516a933a07 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1319.982418] env[68638]: INFO nova.compute.manager [req-9ebddbb9-423a-48c9-8ae4-459c2f09809c req-4784282a-48f1-4967-bc9b-393fa6676496 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Neutron deleted interface 2253d838-fc66-4ff8-b86d-5d516a933a07; detaching it from the instance and deleting it from the info cache [ 1319.982675] env[68638]: DEBUG nova.network.neutron [req-9ebddbb9-423a-48c9-8ae4-459c2f09809c req-4784282a-48f1-4967-bc9b-393fa6676496 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.136973] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.137216] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.137389] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.467170] env[68638]: DEBUG nova.network.neutron [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.484585] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6de580a-cc83-417d-894a-819b4f4f415f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.494531] env[68638]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1251804f-bf99-4952-898d-b4283c5df743 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.519038] env[68638]: DEBUG nova.compute.manager [req-9ebddbb9-423a-48c9-8ae4-459c2f09809c req-4784282a-48f1-4967-bc9b-393fa6676496 service nova] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Detach interface failed, port_id=2253d838-fc66-4ff8-b86d-5d516a933a07, reason: Instance 230f1a80-cf88-41c1-984f-d687932461d7 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1320.969975] env[68638]: INFO nova.compute.manager [-] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Took 1.63 seconds to deallocate network for instance. [ 1321.170925] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.171237] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.171352] env[68638]: DEBUG nova.network.neutron [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1321.476842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.477155] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.477384] env[68638]: DEBUG nova.objects.instance [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'resources' on Instance uuid 230f1a80-cf88-41c1-984f-d687932461d7 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.893355] env[68638]: DEBUG nova.network.neutron [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": 
"f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.041580] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00076e4a-84f3-48bd-8647-39394d331688 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.049237] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70166479-0bbb-4186-99e6-c5b83a679223 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.078244] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7589e82-643c-4376-9a42-edb58276dc2a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.085499] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1a0489-ed0b-4540-9029-c23c5f816c0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.098159] env[68638]: DEBUG nova.compute.provider_tree [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.396231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1322.601538] env[68638]: DEBUG nova.scheduler.client.report [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.920177] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac9a308-cfcb-44cb-8616-5ce52d2debcd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.938416] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4ade3d-f5bb-45b8-a89a-1436a11b22cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.944766] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1323.105895] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.124054] env[68638]: INFO nova.scheduler.client.report [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted allocations for instance 230f1a80-cf88-41c1-984f-d687932461d7 [ 1323.451301] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1323.451740] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a75a7f2-dd97-44c4-a1be-ff867e37ea35 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.459261] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1323.459261] env[68638]: value = "task-2834656" [ 1323.459261] env[68638]: _type = "Task" [ 1323.459261] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.466826] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834656, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.631933] env[68638]: DEBUG oslo_concurrency.lockutils [None req-20a55cf6-1758-4ba6-8551-78896edb2651 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "230f1a80-cf88-41c1-984f-d687932461d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.900s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.969911] env[68638]: DEBUG oslo_vmware.api [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834656, 'name': PowerOnVM_Task, 'duration_secs': 0.359774} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.970204] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1323.970380] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff731ed-7f56-4675-96eb-d20feb090cd5 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance '58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1324.315508] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.315811] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.315986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1324.316189] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.316363] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.318672] env[68638]: INFO nova.compute.manager [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Terminating instance [ 1324.822203] env[68638]: DEBUG nova.compute.manager [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1324.822611] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.823427] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046f57a1-84b1-4e76-ab49-0cf4a7836ea8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.831817] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.832073] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa277020-69f0-4417-931d-3cc78dd64a9f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.839012] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1324.839012] env[68638]: value = "task-2834657" [ 1324.839012] env[68638]: _type = "Task" [ 1324.839012] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.847137] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834657, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.349848] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834657, 'name': PowerOffVM_Task, 'duration_secs': 0.21353} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.350200] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.350388] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1325.350650] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b61c2c45-76bf-4dad-9fe4-1b74d1c5501c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.411985] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1325.412221] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1325.412417] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleting the datastore file [datastore1] e6bb1034-e440-4fb2-ba56-a734c4f67cdb {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.412638] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bae7dc2c-3dd5-4fc6-947b-558bf708cfdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.420306] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for the task: (returnval){ [ 1325.420306] env[68638]: value = "task-2834659" [ 1325.420306] env[68638]: _type = "Task" [ 1325.420306] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.427844] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.934233] env[68638]: DEBUG oslo_vmware.api [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Task: {'id': task-2834659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154952} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.934773] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.934858] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.935137] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.935406] env[68638]: INFO nova.compute.manager [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1325.935753] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1325.936042] env[68638]: DEBUG nova.compute.manager [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1325.936193] env[68638]: DEBUG nova.network.neutron [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1326.271790] env[68638]: DEBUG nova.compute.manager [req-49fd3208-f47b-47ef-b0d9-c95de2a39c78 req-10c3146f-6ee3-467b-8720-f242036975ae service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Received event network-vif-deleted-d0d7d883-914c-4c61-b0d1-3a31b2df0943 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1326.272063] env[68638]: INFO nova.compute.manager [req-49fd3208-f47b-47ef-b0d9-c95de2a39c78 req-10c3146f-6ee3-467b-8720-f242036975ae service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Neutron deleted interface d0d7d883-914c-4c61-b0d1-3a31b2df0943; detaching it from the instance and deleting it from the info cache [ 1326.272737] env[68638]: DEBUG nova.network.neutron [req-49fd3208-f47b-47ef-b0d9-c95de2a39c78 req-10c3146f-6ee3-467b-8720-f242036975ae service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.752111] env[68638]: DEBUG nova.network.neutron [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.775948] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b3871631-b19f-467b-9918-0312136201f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.785732] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a702accb-eba0-4536-b8ad-51325b3ebdfd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.797214] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.797454] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.797637] env[68638]: DEBUG nova.compute.manager [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Going to confirm migration 7 
{{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1326.813200] env[68638]: DEBUG nova.compute.manager [req-49fd3208-f47b-47ef-b0d9-c95de2a39c78 req-10c3146f-6ee3-467b-8720-f242036975ae service nova] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Detach interface failed, port_id=d0d7d883-914c-4c61-b0d1-3a31b2df0943, reason: Instance e6bb1034-e440-4fb2-ba56-a734c4f67cdb could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1327.255113] env[68638]: INFO nova.compute.manager [-] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Took 1.32 seconds to deallocate network for instance. [ 1327.365559] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.365763] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.365940] env[68638]: DEBUG nova.network.neutron [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1327.366140] env[68638]: DEBUG nova.objects.instance [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'info_cache' on Instance uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1327.761833] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.762184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.762372] env[68638]: DEBUG nova.objects.instance [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lazy-loading 'resources' on Instance uuid e6bb1034-e440-4fb2-ba56-a734c4f67cdb {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1328.317332] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68a3bf2-197f-49cf-ac14-ac93c1cec97f {{(pid=68638) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.325035] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1262f1-463e-4d9f-8bb1-0e7e8f21dee5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.353804] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d8dea6-23a0-480f-8902-cb57f4f25731 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.360445] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b8876b-d824-4370-a8f5-5a800c254379 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.375987] env[68638]: DEBUG nova.compute.provider_tree [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1328.607816] env[68638]: DEBUG nova.network.neutron [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [{"id": "ed59a3ef-d65c-48e6-9271-4552c024c365", "address": "fa:16:3e:18:6d:07", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped59a3ef-d6", "ovs_interfaceid": "ed59a3ef-d65c-48e6-9271-4552c024c365", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.896101] env[68638]: ERROR nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] 
[req-6b21a19c-a53f-4265-8325-c0bc27b1cab6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6b21a19c-a53f-4265-8325-c0bc27b1cab6"}]} [ 1328.911422] env[68638]: DEBUG nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1328.923772] env[68638]: DEBUG nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1328.923996] env[68638]: DEBUG nova.compute.provider_tree [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1328.934725] env[68638]: DEBUG nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1328.951311] env[68638]: DEBUG nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1328.995393] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd911a3a-4adc-4441-b080-1d96b9872309 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.002809] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1546d79-0a2a-4eb5-9c89-f1d50be12a11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.032158] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c635be02-9866-4c08-8e7e-63bcfa1047f5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.038671] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ae3ef8-5c4a-4d78-b76a-62a98a53412b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.051222] env[68638]: DEBUG nova.compute.provider_tree [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1329.110729] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.110976] env[68638]: DEBUG nova.objects.instance [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'migration_context' on Instance uuid 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1329.581343] env[68638]: DEBUG nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 172 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1329.581691] env[68638]: DEBUG nova.compute.provider_tree [None 
req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 172 to 173 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1329.581777] env[68638]: DEBUG nova.compute.provider_tree [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1329.613827] env[68638]: DEBUG nova.objects.base [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Object Instance<58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9> lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1329.615028] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90ecf86-161c-4845-97d9-e6cffcb3ec53 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.635497] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ca08fb-b0f5-476c-a55a-1e900a6271f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.641496] env[68638]: DEBUG oslo_vmware.api [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1329.641496] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a183df-e0fd-b961-545d-bcc313b3045b" [ 1329.641496] env[68638]: _type = "Task" [ 1329.641496] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.649137] env[68638]: DEBUG oslo_vmware.api [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a183df-e0fd-b961-545d-bcc313b3045b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.086834] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.104218] env[68638]: INFO nova.scheduler.client.report [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Deleted allocations for instance e6bb1034-e440-4fb2-ba56-a734c4f67cdb [ 1330.152472] env[68638]: DEBUG oslo_vmware.api [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a183df-e0fd-b961-545d-bcc313b3045b, 'name': SearchDatastore_Task, 'duration_secs': 0.008741} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.152750] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.152986] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.613907] env[68638]: DEBUG oslo_concurrency.lockutils [None req-eae22a72-1445-42c1-b1ad-08feab5a2867 tempest-AttachInterfacesTestJSON-1639086093 tempest-AttachInterfacesTestJSON-1639086093-project-member] Lock "e6bb1034-e440-4fb2-ba56-a734c4f67cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.298s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.703386] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85381741-9e01-433b-b182-e8c5583b81da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.711349] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f85b2bc-3043-4d04-8a1b-02b1b3c7e569 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.741371] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95df4c2-4b05-49c0-9965-bd76450db5a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.748598] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e5286638-b002-4211-aef0-c04aa86fa703 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.763126] env[68638]: DEBUG nova.compute.provider_tree [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.266889] env[68638]: DEBUG nova.scheduler.client.report [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1332.277108] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.124s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.839503] env[68638]: INFO nova.scheduler.client.report [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted allocation for migration 3f7095a0-9559-4d5f-b8e7-759289f39838 [ 1333.347376] env[68638]: DEBUG oslo_concurrency.lockutils [None req-dac4c2a7-4263-4a67-8c23-8b8f944b39ae tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.549s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.210958] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.211260] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.211475] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 
tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.211686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.211866] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.213923] env[68638]: INFO nova.compute.manager [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Terminating instance [ 1334.718753] env[68638]: DEBUG nova.compute.manager [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1334.718753] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1334.719560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12078562-8abe-4134-8d2c-d00e6a4cf41d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.727531] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.727817] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1547ab3-5a65-4107-9e93-9f8d96eab10c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.737514] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1334.737514] env[68638]: value = "task-2834660" [ 1334.737514] env[68638]: _type = "Task" [ 1334.737514] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.746231] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.247621] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834660, 'name': PowerOffVM_Task, 'duration_secs': 0.19562} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.247733] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1335.247870] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1335.248136] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-793a49cb-d60b-44ae-a40f-7c970df0c1de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.312030] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.312030] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.312030] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleting the datastore file [datastore2] 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.312030] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fc1c604-8b6d-4663-98de-4e5fa08ea048 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.317704] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1335.317704] env[68638]: value = "task-2834662" [ 1335.317704] env[68638]: _type = "Task" [ 
1335.317704] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.326041] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.827633] env[68638]: DEBUG oslo_vmware.api [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15182} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.828145] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.828145] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.828318] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.828419] env[68638]: INFO nova.compute.manager [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1335.828664] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1335.828850] env[68638]: DEBUG nova.compute.manager [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1335.828934] env[68638]: DEBUG nova.network.neutron [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1336.313629] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "43b466d6-03fb-49d5-9404-f0009199fbe1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.313867] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.380916] env[68638]: DEBUG nova.compute.manager [req-0c18a76a-61ae-4570-8fed-855185bbb489 req-170ac162-210e-4217-8fa1-f41dc88a515e service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Received event network-vif-deleted-ed59a3ef-d65c-48e6-9271-4552c024c365 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1336.381276] env[68638]: INFO nova.compute.manager [req-0c18a76a-61ae-4570-8fed-855185bbb489 req-170ac162-210e-4217-8fa1-f41dc88a515e service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Neutron deleted interface ed59a3ef-d65c-48e6-9271-4552c024c365; detaching it from the instance and deleting it from the info cache [ 1336.381514] env[68638]: DEBUG nova.network.neutron [req-0c18a76a-61ae-4570-8fed-855185bbb489 req-170ac162-210e-4217-8fa1-f41dc88a515e service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.816764] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1336.864091] env[68638]: DEBUG nova.network.neutron [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.885037] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51e51227-c928-4c01-80ff-7fa0f9a36fc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.898162] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d499f90b-6b56-482f-91f5-cfc234aa0dd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.925823] env[68638]: DEBUG nova.compute.manager [req-0c18a76a-61ae-4570-8fed-855185bbb489 req-170ac162-210e-4217-8fa1-f41dc88a515e service nova] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Detach interface failed, port_id=ed59a3ef-d65c-48e6-9271-4552c024c365, reason: Instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1337.348420] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.348777] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.350506] env[68638]: INFO nova.compute.claims [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1337.367801] env[68638]: INFO nova.compute.manager [-] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Took 1.54 seconds to deallocate network for instance. 
[ 1337.874365] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.402197] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817aa273-d2cd-45e3-a321-411ee5130e73 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.410214] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94dd9ee-73db-46a9-9799-8ebfb41311a8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.440536] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b1cac6-0c1c-46c9-b23e-f5ad9308f803 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.448134] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108c70e6-2bbc-453b-b689-2e449aeba3b2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.461779] env[68638]: DEBUG nova.compute.provider_tree [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.965407] env[68638]: DEBUG nova.scheduler.client.report [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1339.471592] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.472100] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Start building networks asynchronously for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1339.474630] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.600s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.474824] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.502944] env[68638]: INFO nova.scheduler.client.report [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted allocations for instance 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9 [ 1339.979857] env[68638]: DEBUG nova.compute.utils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1339.981292] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1339.981494] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1340.010892] env[68638]: DEBUG oslo_concurrency.lockutils [None req-45d974bc-1f2a-4e01-b751-2f918d3b8ae6 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.800s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.095876] env[68638]: DEBUG nova.policy [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c23c16c455cd46d7b3d7a137d6ca14f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '50c3086679554087a646211f05a34950', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1340.395246] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Successfully created port: 62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.485266] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1341.494724] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1341.546199] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1341.546531] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.546721] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1341.546912] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.547177] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1341.547383] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1341.547594] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1341.547762] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1341.547931] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1341.548113] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1341.548292] env[68638]: DEBUG nova.virt.hardware [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1341.549235] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b9340f-0a63-4ace-a720-1e75938f01d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.558526] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36177b2b-fa91-426c-8457-3dd0f01927a1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.957465] env[68638]: DEBUG nova.compute.manager [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Received event network-vif-plugged-62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1341.957465] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] Acquiring lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.957465] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.957465] env[68638]: DEBUG oslo_concurrency.lockutils [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.957465] env[68638]: DEBUG nova.compute.manager [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] No waiting events found dispatching network-vif-plugged-62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1341.957677] env[68638]: WARNING nova.compute.manager [req-9d5e2d92-ada8-4868-9fa4-44a10c8b7dd5 req-e7f5e3cb-1038-43d9-94a5-198fad7bcd80 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Received unexpected event network-vif-plugged-62048a54-b6eb-4116-b9a7-6ca217e72373 for instance with vm_state building and task_state spawning. [ 1342.094547] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Successfully updated port: 62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1342.110372] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.110658] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.597126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.597516] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquired lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.597516] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1342.613457] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.156839] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.157145] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.159356] env[68638]: INFO nova.compute.claims [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1343.184698] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1343.483665] env[68638]: DEBUG nova.network.neutron [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Updating instance_info_cache with network_info: [{"id": "62048a54-b6eb-4116-b9a7-6ca217e72373", "address": "fa:16:3e:bb:15:f8", "network": {"id": "8f6c6dbb-6826-4316-9c9a-c29a133aff01", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2065959658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50c3086679554087a646211f05a34950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62048a54-b6", "ovs_interfaceid": "62048a54-b6eb-4116-b9a7-6ca217e72373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.981782] env[68638]: DEBUG nova.compute.manager [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Received event network-changed-62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 1343.982031] env[68638]: DEBUG nova.compute.manager [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Refreshing instance network info cache due to event network-changed-62048a54-b6eb-4116-b9a7-6ca217e72373. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1343.982210] env[68638]: DEBUG oslo_concurrency.lockutils [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] Acquiring lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.986182] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Releasing lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.986451] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Instance network_info: |[{"id": "62048a54-b6eb-4116-b9a7-6ca217e72373", "address": "fa:16:3e:bb:15:f8", "network": {"id": "8f6c6dbb-6826-4316-9c9a-c29a133aff01", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2065959658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50c3086679554087a646211f05a34950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62048a54-b6", "ovs_interfaceid": "62048a54-b6eb-4116-b9a7-6ca217e72373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1343.986696] env[68638]: DEBUG oslo_concurrency.lockutils [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] Acquired lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1343.986864] env[68638]: DEBUG nova.network.neutron [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Refreshing network info cache for port 62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1343.987974] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 
tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:15:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62048a54-b6eb-4116-b9a7-6ca217e72373', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.995363] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Creating folder: Project (50c3086679554087a646211f05a34950). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1343.996418] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74421234-2878-48c3-9031-86a47eae8f72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.009603] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Created folder: Project (50c3086679554087a646211f05a34950) in parent group-v569734. [ 1344.009779] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Creating folder: Instances. Parent ref: group-v570063. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.009996] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7404b34d-2fec-43ed-9f16-0e50bf264ae8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.018756] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Created folder: Instances in parent group-v570063. [ 1344.018976] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1344.019205] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.019403] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da50be68-01c0-4916-b4eb-3037c2cd858d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.038966] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.038966] env[68638]: value = "task-2834665" [ 1344.038966] env[68638]: _type = "Task" [ 1344.038966] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.046108] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834665, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.213671] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9264d6-ae22-4a71-b0b3-d4323c5c80bf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.221129] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b71dfad-230d-49c7-8bdd-8eea3f33c143 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.250835] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dae1cd-99d1-47fd-9b5c-ab16de136aff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.258754] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c971c83-647f-46f1-a362-047d08c57bbf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.272846] env[68638]: DEBUG nova.compute.provider_tree [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.547964] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834665, 'name': CreateVM_Task, 'duration_secs': 0.301585} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.548158] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1344.548914] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.548996] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.550027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1344.550027] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46fbe7c4-49d7-4f5f-8a07-504c880fb834 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.553646] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1344.553646] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f7ad5e-a016-7cd4-e919-0e3b1608c41f" [ 1344.553646] env[68638]: _type = "Task" [ 1344.553646] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.564466] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f7ad5e-a016-7cd4-e919-0e3b1608c41f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.675626] env[68638]: DEBUG nova.network.neutron [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Updated VIF entry in instance network info cache for port 62048a54-b6eb-4116-b9a7-6ca217e72373. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1344.675994] env[68638]: DEBUG nova.network.neutron [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Updating instance_info_cache with network_info: [{"id": "62048a54-b6eb-4116-b9a7-6ca217e72373", "address": "fa:16:3e:bb:15:f8", "network": {"id": "8f6c6dbb-6826-4316-9c9a-c29a133aff01", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-2065959658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "50c3086679554087a646211f05a34950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62048a54-b6", "ovs_interfaceid": "62048a54-b6eb-4116-b9a7-6ca217e72373", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.775741] env[68638]: DEBUG nova.scheduler.client.report [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1345.063455] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f7ad5e-a016-7cd4-e919-0e3b1608c41f, 'name': SearchDatastore_Task, 'duration_secs': 0.01262} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.063774] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.063935] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.064189] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.064341] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.064518] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.064783] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6eb5c60c-5bd2-42da-b074-d37f001162d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.073568] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.073744] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.074468] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af62ea3b-8c9d-4b95-a07d-0bfbf22a5aeb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.079590] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1345.079590] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52a836ab-f3b5-1e74-6332-d1677644ad5f" [ 1345.079590] env[68638]: _type = "Task" [ 1345.079590] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.086799] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a836ab-f3b5-1e74-6332-d1677644ad5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.178756] env[68638]: DEBUG oslo_concurrency.lockutils [req-182f1258-5e71-410c-9ec7-b4b85a12165d req-00fcdaad-efc6-45f9-b3e5-8b343b1a2a36 service nova] Releasing lock "refresh_cache-43b466d6-03fb-49d5-9404-f0009199fbe1" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.280038] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.280591] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1345.590132] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52a836ab-f3b5-1e74-6332-d1677644ad5f, 'name': SearchDatastore_Task, 'duration_secs': 0.009882} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.590923] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f3a1e5-9b2c-412f-9096-bc86a78b825b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.595770] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1345.595770] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d87669-61a5-e332-972b-c32b722765a7" [ 1345.595770] env[68638]: _type = "Task" [ 1345.595770] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.603098] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d87669-61a5-e332-972b-c32b722765a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.785963] env[68638]: DEBUG nova.compute.utils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1345.787894] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1345.787894] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1345.822638] env[68638]: DEBUG nova.policy [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fb22fd94276463ebb001ec679a36fec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c238a05699ee42f9a3d69c16f0777ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1346.105604] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d87669-61a5-e332-972b-c32b722765a7, 'name': SearchDatastore_Task, 'duration_secs': 0.010471} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.105890] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.106562] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 43b466d6-03fb-49d5-9404-f0009199fbe1/43b466d6-03fb-49d5-9404-f0009199fbe1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.106562] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ed79ddc-7aed-426d-aaf5-16a71a365735 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.113600] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1346.113600] env[68638]: value = "task-2834666" [ 1346.113600] env[68638]: _type = "Task" [ 1346.113600] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.121089] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.151051] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Successfully created port: 32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.291119] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1346.623556] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834666, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.124387] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834666, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511014} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.124736] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 43b466d6-03fb-49d5-9404-f0009199fbe1/43b466d6-03fb-49d5-9404-f0009199fbe1.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.124807] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.125067] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8ca22d7-8081-4073-9988-e9db840dafae {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.131706] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1347.131706] env[68638]: value = "task-2834667" [ 1347.131706] env[68638]: _type = "Task" [ 1347.131706] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.138799] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834667, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.302713] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1347.350795] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1347.351052] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.351215] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1347.351402] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.351548] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1347.351694] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1347.351900] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1347.352067] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1347.352237] env[68638]: DEBUG nova.virt.hardware [None 
req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1347.352403] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1347.352574] env[68638]: DEBUG nova.virt.hardware [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1347.353431] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02f9c7e-4ab4-4f85-b25f-6885f70afd2e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.361644] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5338655-fa71-4ffb-a506-67eeeb99d568 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.549620] env[68638]: DEBUG nova.compute.manager [req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Received event network-vif-plugged-32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1347.549781] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] Acquiring lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.549984] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.550213] env[68638]: DEBUG oslo_concurrency.lockutils [req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.550385] env[68638]: DEBUG nova.compute.manager [req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] No waiting events found dispatching network-vif-plugged-32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1347.550544] env[68638]: WARNING nova.compute.manager 
[req-a6849aaa-82e2-4d52-bff5-c2fed0a28655 req-12c11b84-b999-431c-bcdf-ca18b3f4dcc1 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Received unexpected event network-vif-plugged-32c36f62-8c15-481f-9b9e-613eafeaeae8 for instance with vm_state building and task_state spawning. [ 1347.642813] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060205} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.643096] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.643871] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac01464-2b45-4024-ac84-e1b2f7ed6704 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.667133] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 43b466d6-03fb-49d5-9404-f0009199fbe1/43b466d6-03fb-49d5-9404-f0009199fbe1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.668430] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Successfully updated port: 32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1347.669989] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c3f5c32-7251-44f8-aa6a-bdd0c2ef9039 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.689684] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1347.689684] env[68638]: value = "task-2834668" [ 1347.689684] env[68638]: _type = "Task" [ 1347.689684] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.698293] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834668, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.171489] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.171769] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.171769] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1348.199127] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834668, 'name': ReconfigVM_Task, 'duration_secs': 0.302491} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.199428] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 43b466d6-03fb-49d5-9404-f0009199fbe1/43b466d6-03fb-49d5-9404-f0009199fbe1.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.200038] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb6bf756-6fa6-40b3-a52d-43e9f7a11a44 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.206254] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1348.206254] env[68638]: value = "task-2834669" [ 1348.206254] env[68638]: _type = "Task" [ 1348.206254] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.213286] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834669, 'name': Rename_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.702456] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1348.717042] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834669, 'name': Rename_Task, 'duration_secs': 0.133923} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.717042] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.717042] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6407e5a3-22a0-4267-935a-8e764f068190 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.723025] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1348.723025] env[68638]: value = "task-2834670" [ 1348.723025] env[68638]: _type = "Task" [ 1348.723025] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.730349] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834670, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.829817] env[68638]: DEBUG nova.network.neutron [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.232735] env[68638]: DEBUG oslo_vmware.api [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834670, 'name': PowerOnVM_Task, 'duration_secs': 0.43443} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.233072] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1349.233208] env[68638]: INFO nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Took 7.74 seconds to spawn the instance on the hypervisor. 
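The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task sequences above all follow the same oslo.vmware pattern: the driver invokes a vSphere method that returns a task reference, then wait_for_task() polls it until it completes (the "progress is N%" and "completed successfully" lines are emitted by _poll_task). The sketch below shows that pattern outside Nova; the helper names, credentials and constructor arguments are illustrative assumptions and should be checked against the installed oslo.vmware release.

    from oslo_vmware import api as vmware_api


    def connect(host, user, password):
        # Placeholder credentials; in Nova these come from the [vmware]
        # section of nova.conf. The argument list here is an assumption --
        # verify it against the oslo.vmware version in use.
        return vmware_api.VMwareAPISession(
            host, user, password,
            api_retry_count=10, task_poll_interval=0.5)


    def power_on(session, vm_ref):
        # Invoke a task-returning vSphere method; the result is a Task
        # managed-object reference like the "task-2834670" value above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task state -- that polling is what
        # emits the "Task: {...} progress is N%" DEBUG lines -- and raises
        # if the task finishes in an error state.
        return session.wait_for_task(task)

Nova's vmwareapi driver reaches these same two calls through its own session wrapper rather than invoking them directly, but the polling behaviour recorded in this log is the one sketched here.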
[ 1349.233388] env[68638]: DEBUG nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1349.234142] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df2866e-9a03-44a9-aea5-fb42181debec {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.333830] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.334323] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Instance network_info: |[{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1349.334766] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:17:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32c36f62-8c15-481f-9b9e-613eafeaeae8', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1349.342157] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1349.342360] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1349.342583] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52d2a8fd-7fd0-4711-89f3-e41fbd317a2c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.361654] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.361654] env[68638]: value = "task-2834671" [ 1349.361654] env[68638]: _type = "Task" [ 1349.361654] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.368766] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834671, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.575384] env[68638]: DEBUG nova.compute.manager [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Received event network-changed-32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1349.575723] env[68638]: DEBUG nova.compute.manager [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Refreshing instance network info cache due to event network-changed-32c36f62-8c15-481f-9b9e-613eafeaeae8. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1349.576056] env[68638]: DEBUG oslo_concurrency.lockutils [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.576236] env[68638]: DEBUG oslo_concurrency.lockutils [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.576405] env[68638]: DEBUG nova.network.neutron [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Refreshing network info cache for port 32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.749919] env[68638]: INFO nova.compute.manager [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Took 12.43 seconds to build instance. [ 1349.871741] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834671, 'name': CreateVM_Task, 'duration_secs': 0.288222} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.871915] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1349.872611] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.872786] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.873121] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1349.873381] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74ffd45d-474b-4f79-8bf2-d385935e15d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.877642] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1349.877642] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52d2a0a7-63cd-c6fd-1a65-3b45335efc53" [ 1349.877642] env[68638]: _type = "Task" [ 1349.877642] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.885170] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d2a0a7-63cd-c6fd-1a65-3b45335efc53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.247619] env[68638]: DEBUG nova.network.neutron [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updated VIF entry in instance network info cache for port 32c36f62-8c15-481f-9b9e-613eafeaeae8. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.247976] env[68638]: DEBUG nova.network.neutron [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.250884] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c228c0a-5492-4ba5-bef6-15c9c0d26a16 tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.937s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.388975] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52d2a0a7-63cd-c6fd-1a65-3b45335efc53, 'name': SearchDatastore_Task, 'duration_secs': 0.00967} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.389274] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.389556] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.389832] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.390053] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.390270] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.390556] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbd64320-d8a6-42cd-85fc-39dfa1e2b07c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.398234] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.398441] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.399244] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0ea259f-35e7-4f31-96b4-92e6543fe74a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.405515] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1350.405515] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52607dcb-4286-8c65-0ef0-245391aa273d" [ 1350.405515] env[68638]: _type = "Task" [ 1350.405515] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.412317] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52607dcb-4286-8c65-0ef0-245391aa273d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.466994] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "43b466d6-03fb-49d5-9404-f0009199fbe1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.467299] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.467604] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1350.467797] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1350.468036] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.470219] env[68638]: INFO nova.compute.manager [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Terminating instance [ 1350.750972] env[68638]: DEBUG oslo_concurrency.lockutils [req-f5a8a681-6e99-4fa5-b13c-e1743996ea69 req-f9933a43-10f1-4f3f-9a85-8449f042e9be service nova] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.916500] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52607dcb-4286-8c65-0ef0-245391aa273d, 'name': SearchDatastore_Task, 'duration_secs': 0.007694} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.917266] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54843ddb-26f0-49d1-b971-4e08521b51b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.922287] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1350.922287] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5246123a-93de-eb1c-2241-b2db04aa9a9a" [ 1350.922287] env[68638]: _type = "Task" [ 1350.922287] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.929330] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5246123a-93de-eb1c-2241-b2db04aa9a9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.974124] env[68638]: DEBUG nova.compute.manager [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1350.974346] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1350.975123] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf8302f-5c73-46c9-b536-5fc0f0d4125d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.981611] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1350.981833] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0ae96fb-bb0a-474e-8c59-25f18c9cd44c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.987723] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1350.987723] env[68638]: value = "task-2834672" [ 1350.987723] env[68638]: _type = "Task" [ 1350.987723] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.995521] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834672, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.433334] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5246123a-93de-eb1c-2241-b2db04aa9a9a, 'name': SearchDatastore_Task, 'duration_secs': 0.009093} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.433637] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.433805] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.434065] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41804e7b-3918-4c61-a5b2-9d53b4742060 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.440955] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1351.440955] env[68638]: value = "task-2834673" [ 1351.440955] env[68638]: _type = "Task" [ 1351.440955] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.448108] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.496339] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834672, 'name': PowerOffVM_Task, 'duration_secs': 0.193554} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.496591] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1351.496759] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1351.497007] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20032322-fd8a-439c-b1de-a3661e6e5aad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.562011] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1351.562243] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1351.562435] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Deleting the datastore file [datastore1] 43b466d6-03fb-49d5-9404-f0009199fbe1 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1351.562694] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a58b2cfa-16ce-4ca5-8650-fb5d912d91f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.569478] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for the task: (returnval){ [ 1351.569478] env[68638]: value = "task-2834675" [ 1351.569478] env[68638]: _type = "Task" [ 1351.569478] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.576824] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.953122] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834673, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421971} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.953402] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1351.953627] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1351.953881] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66abc637-fa41-4f5c-bb9e-1eaf6fd5fc61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.959256] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1351.959256] env[68638]: value = "task-2834676" [ 1351.959256] env[68638]: _type = "Task" [ 1351.959256] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.966355] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834676, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.079280] env[68638]: DEBUG oslo_vmware.api [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Task: {'id': task-2834675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274626} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.079427] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1352.079541] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1352.079718] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1352.079889] env[68638]: INFO nova.compute.manager [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1352.080155] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1352.080368] env[68638]: DEBUG nova.compute.manager [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1352.080464] env[68638]: DEBUG nova.network.neutron [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1352.451839] env[68638]: DEBUG nova.compute.manager [req-9f56e16a-4669-4da1-b9a0-2aad5146d501 req-40d485e6-94b9-45fb-b7e6-5c40dbf5253c service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Received event network-vif-deleted-62048a54-b6eb-4116-b9a7-6ca217e72373 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1352.451839] env[68638]: INFO nova.compute.manager [req-9f56e16a-4669-4da1-b9a0-2aad5146d501 req-40d485e6-94b9-45fb-b7e6-5c40dbf5253c service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Neutron deleted interface 62048a54-b6eb-4116-b9a7-6ca217e72373; detaching it from the instance and deleting it from the info cache [ 1352.452305] env[68638]: DEBUG nova.network.neutron [req-9f56e16a-4669-4da1-b9a0-2aad5146d501 req-40d485e6-94b9-45fb-b7e6-5c40dbf5253c service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.469919] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.306258} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.470221] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.471041] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3e1a0f-dea3-43a9-b06e-5fee3d8a15fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.496533] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.496844] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c8caf40-7cd6-4d24-9bed-18cf81c5ac78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.517817] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1352.517817] env[68638]: value = "task-2834677" [ 1352.517817] env[68638]: _type = "Task" [ 1352.517817] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.526187] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834677, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.923795] env[68638]: DEBUG nova.network.neutron [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.955143] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66afd03a-6051-49d9-82ec-5278ada5ca36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.964333] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f02b1a-fdcc-4e6b-b853-35f6f5b28990 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.988399] env[68638]: DEBUG nova.compute.manager [req-9f56e16a-4669-4da1-b9a0-2aad5146d501 req-40d485e6-94b9-45fb-b7e6-5c40dbf5253c service nova] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Detach interface failed, port_id=62048a54-b6eb-4116-b9a7-6ca217e72373, reason: Instance 43b466d6-03fb-49d5-9404-f0009199fbe1 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1353.026907] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834677, 'name': ReconfigVM_Task, 'duration_secs': 0.263225} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.027219] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.027858] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-176c5f86-0a87-45d5-8d7d-269346fa68ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.034131] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1353.034131] env[68638]: value = "task-2834678" [ 1353.034131] env[68638]: _type = "Task" [ 1353.034131] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.041428] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834678, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.426429] env[68638]: INFO nova.compute.manager [-] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Took 1.35 seconds to deallocate network for instance. [ 1353.544894] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834678, 'name': Rename_Task, 'duration_secs': 0.124136} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.544894] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.545343] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e366d76-0f5d-4bda-98ec-03bc7a19ae8b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.551315] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1353.551315] env[68638]: value = "task-2834679" [ 1353.551315] env[68638]: _type = "Task" [ 1353.551315] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.560354] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.933587] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1353.933880] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1353.934124] env[68638]: DEBUG nova.objects.instance [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lazy-loading 'resources' on Instance uuid 43b466d6-03fb-49d5-9404-f0009199fbe1 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1354.060920] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834679, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.487045] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f03bda1-ae6c-442a-b66c-6225556f9a96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.494668] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7467b327-75b4-4220-a71c-44741053d267 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.525293] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eaa433-67a3-4d57-b7a3-1a2a2bbb0b40 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.532982] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c0423b-533c-4c39-be66-4e200f8504fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.546120] env[68638]: DEBUG nova.compute.provider_tree [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1354.559569] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834679, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.060723] env[68638]: DEBUG oslo_vmware.api [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834679, 'name': PowerOnVM_Task, 'duration_secs': 1.05699} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.060979] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1355.061196] env[68638]: INFO nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Took 7.76 seconds to spawn the instance on the hypervisor. 
[ 1355.061378] env[68638]: DEBUG nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1355.062140] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612069e7-0f98-4972-8cd0-f4bb3c5e3ffe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.065723] env[68638]: ERROR nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] [req-74b9222a-50dd-40dd-a899-985f2ad26974] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-74b9222a-50dd-40dd-a899-985f2ad26974"}]} [ 1355.084822] env[68638]: DEBUG nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1355.100331] env[68638]: DEBUG nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1355.100594] env[68638]: DEBUG nova.compute.provider_tree [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1355.114988] 
env[68638]: DEBUG nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1355.139780] env[68638]: DEBUG nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1355.172382] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e782c82-50ef-4eb2-9207-9011fba4b1b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.179770] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac1eceb-0c60-43cc-93f4-4fc6e576becd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.209706] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9145411e-c47d-4cc9-95d3-6eb51c6a2753 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.216322] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd62bc3e-0fd2-4932-9b54-b2f6b022881a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.229149] env[68638]: DEBUG nova.compute.provider_tree [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1355.580110] env[68638]: INFO nova.compute.manager [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Took 12.46 seconds to build instance. 
[ 1355.763572] env[68638]: DEBUG nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1355.763857] env[68638]: DEBUG nova.compute.provider_tree [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 176 to 177 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1355.764053] env[68638]: DEBUG nova.compute.provider_tree [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1356.081639] env[68638]: DEBUG oslo_concurrency.lockutils [None req-e506a16d-cbe4-4cda-b907-acc0e1bda06a tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.971s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.270080] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.293997] env[68638]: INFO nova.scheduler.client.report [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Deleted allocations for instance 43b466d6-03fb-49d5-9404-f0009199fbe1 [ 1356.340306] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.376146] env[68638]: DEBUG nova.compute.manager 
[req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Received event network-changed-32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1356.376146] env[68638]: DEBUG nova.compute.manager [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Refreshing instance network info cache due to event network-changed-32c36f62-8c15-481f-9b9e-613eafeaeae8. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1356.376146] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.376146] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1356.376146] env[68638]: DEBUG nova.network.neutron [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Refreshing network info cache for port 32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1356.802262] env[68638]: DEBUG oslo_concurrency.lockutils [None req-98529c03-4a94-465d-9e96-c1cd8cc8fbfa tempest-InstanceActionsV221TestJSON-1768115208 tempest-InstanceActionsV221TestJSON-1768115208-project-member] Lock "43b466d6-03fb-49d5-9404-f0009199fbe1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.335s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1357.101895] env[68638]: DEBUG nova.network.neutron [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updated VIF entry in instance network info cache for port 32c36f62-8c15-481f-9b9e-613eafeaeae8. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1357.102273] env[68638]: DEBUG nova.network.neutron [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1357.605346] env[68638]: DEBUG oslo_concurrency.lockutils [req-d0157ad7-fc1f-4181-be30-84248bb82ea1 req-dd9984e2-509e-417b-afb2-c556fecba715 service nova] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.340610] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.341800] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1362.137298] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.137586] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.340528] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1362.340702] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1362.640067] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1363.160511] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.160817] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.162715] env[68638]: INFO nova.compute.claims [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1363.340895] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.208476] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fa9959-0acd-4c45-a3fb-6b7ec09e2029 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.216067] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeaea7f9-5133-4bd9-a400-b1b4e125b979 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.246136] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df0840-b198-475a-aebe-9b3bd75d6079 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.253046] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682657f1-6d93-4463-8125-fe51c4ac432a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1364.265574] env[68638]: DEBUG nova.compute.provider_tree [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.335318] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.340089] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.768216] env[68638]: DEBUG nova.scheduler.client.report [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1365.273199] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.273742] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1365.340387] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.778447] env[68638]: DEBUG nova.compute.utils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1365.779945] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1365.780588] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1365.827618] env[68638]: DEBUG nova.policy [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd95244c44c44219ef19304882b99d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9da776668a424815986399da431ae74f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1365.843124] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.843357] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.843522] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.843671] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1365.844553] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448a88b2-7220-4634-ae21-34b878cb5f36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.852350] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e2cfee-4c82-4cd9-b9ea-277ef42a32fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.867897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed4b334-20b1-46eb-bc68-e244a8a7f5f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.874198] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b77ae6-0638-4095-909c-fbff5c25388b {{(pid=68638) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.904573] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180890MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1365.904727] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.904941] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.067393] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Successfully created port: 2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1366.283567] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1366.930065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1366.930065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1366.930065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1366.930065] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1366.965718] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9c6434-e4c5-411b-8cff-d89bdd368a21 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.973517] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786eba29-c238-4b4c-9167-e5c1d3826564 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.002911] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ddc1ad-4762-4ea6-8232-3441baf33019 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.010425] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d971a4d-8ceb-469f-b875-ff1d91f86aab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.024214] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.293381] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1367.317559] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1367.317818] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1367.317976] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1367.318172] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1367.318318] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1367.318465] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1367.318668] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1367.318825] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1367.318989] env[68638]: DEBUG 
nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1367.319160] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1367.319368] env[68638]: DEBUG nova.virt.hardware [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1367.320216] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecd7c08-e091-42f2-8574-c506bc581fc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.327972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad9db61-0782-4613-a3a0-477fd3d27315 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.410729] env[68638]: DEBUG nova.compute.manager [req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Received event network-vif-plugged-2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1367.410804] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.411019] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.411207] env[68638]: DEBUG oslo_concurrency.lockutils [req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.411478] env[68638]: DEBUG nova.compute.manager [req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] No waiting events found dispatching network-vif-plugged-2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1367.411691] env[68638]: WARNING nova.compute.manager 
[req-bb86bc8e-738d-45fd-881f-3da3da2714d4 req-9c7b8eee-98a3-4291-8716-ef9a67f82ba0 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Received unexpected event network-vif-plugged-2a6adb50-4ed4-4484-a4c6-2272e30f226a for instance with vm_state building and task_state spawning. [ 1367.488722] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Successfully updated port: 2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1367.527599] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1367.991072] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.991231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.991324] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1368.032017] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1368.032223] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.127s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1368.521666] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1368.633823] env[68638]: DEBUG nova.network.neutron [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.136500] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.136829] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Instance network_info: |[{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1369.137324] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:24:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a6adb50-4ed4-4484-a4c6-2272e30f226a', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1369.144810] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating folder: Project (9da776668a424815986399da431ae74f). Parent ref: group-v569734. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.145099] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37f34d7f-f4f9-4f59-b4e7-d6b5757e38de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.156482] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created folder: Project (9da776668a424815986399da431ae74f) in parent group-v569734. [ 1369.156661] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating folder: Instances. Parent ref: group-v570067. {{(pid=68638) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1369.156882] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70b6b140-904c-4d53-918a-21d67d00d768 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.165450] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created folder: Instances in parent group-v570067. [ 1369.165663] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1369.165841] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1369.166033] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdc72ce4-60fb-4ad5-ab61-82f867d27eba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.183382] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1369.183382] env[68638]: value = "task-2834682" [ 1369.183382] env[68638]: _type = "Task" [ 1369.183382] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.190512] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834682, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.436402] env[68638]: DEBUG nova.compute.manager [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Received event network-changed-2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1369.436402] env[68638]: DEBUG nova.compute.manager [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Refreshing instance network info cache due to event network-changed-2a6adb50-4ed4-4484-a4c6-2272e30f226a. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1369.436402] env[68638]: DEBUG oslo_concurrency.lockutils [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.436588] env[68638]: DEBUG oslo_concurrency.lockutils [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.436616] env[68638]: DEBUG nova.network.neutron [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Refreshing network info cache for port 2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1369.693943] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834682, 'name': CreateVM_Task, 'duration_secs': 0.298026} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.694301] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1369.694829] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.694999] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.695332] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1369.695592] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8713a4e-ca6f-4382-ae3f-794667a6f5ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.700282] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1369.700282] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]523907d5-12ec-7eb8-798f-9519f7940608" [ 1369.700282] env[68638]: _type = "Task" [ 1369.700282] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.709169] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523907d5-12ec-7eb8-798f-9519f7940608, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.107791] env[68638]: DEBUG nova.network.neutron [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updated VIF entry in instance network info cache for port 2a6adb50-4ed4-4484-a4c6-2272e30f226a. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1370.108161] env[68638]: DEBUG nova.network.neutron [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.210911] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]523907d5-12ec-7eb8-798f-9519f7940608, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.211267] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.211525] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1370.211841] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.212013] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1370.212216] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1370.212505] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9123ea4b-556f-4f10-99a2-f4f288a59aaa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.222713] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1370.222851] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1370.223566] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d060e975-65c8-43bb-a014-93e4e2d25417 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.228522] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1370.228522] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52099e71-0fb5-c57a-6044-6ccfc959a936" [ 1370.228522] env[68638]: _type = "Task" [ 1370.228522] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.236101] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52099e71-0fb5-c57a-6044-6ccfc959a936, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.611297] env[68638]: DEBUG oslo_concurrency.lockutils [req-095381e1-05ae-4d8e-adef-c400a2e014d2 req-75136e90-0f00-4b7d-b6ec-bbdbef2b6f67 service nova] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1370.739012] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52099e71-0fb5-c57a-6044-6ccfc959a936, 'name': SearchDatastore_Task, 'duration_secs': 0.008066} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.739822] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-096d2956-cda7-40bb-84e0-c6d6db0678a7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.744703] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1370.744703] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e91b02-861a-ced2-3bdb-c26c2e5ef11e" [ 1370.744703] env[68638]: _type = "Task" [ 1370.744703] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.752194] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e91b02-861a-ced2-3bdb-c26c2e5ef11e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.255863] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52e91b02-861a-ced2-3bdb-c26c2e5ef11e, 'name': SearchDatastore_Task, 'duration_secs': 0.009861} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.255956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.256251] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1371.256525] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18de4432-8258-4ad1-a742-71316e4aa01b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.265113] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1371.265113] env[68638]: value = "task-2834683" [ 1371.265113] env[68638]: _type = "Task" [ 1371.265113] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.273046] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.775144] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5058} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.775526] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1371.775636] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1371.775884] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed655578-0cc4-4a35-9d09-0e48ae81a60a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.782018] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1371.782018] env[68638]: value = "task-2834684" [ 1371.782018] env[68638]: _type = "Task" [ 1371.782018] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.789149] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834684, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.291407] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062044} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.291681] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1372.292437] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19546884-ea9e-488b-a395-a87a8b2e35a5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.313561] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1372.313780] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc70935b-7c83-4adc-b27b-180121f8a7c1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.332457] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1372.332457] env[68638]: value = "task-2834685" [ 1372.332457] env[68638]: _type = "Task" [ 1372.332457] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.339550] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834685, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.843821] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834685, 'name': ReconfigVM_Task, 'duration_secs': 0.273564} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.844209] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1372.844690] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6711d2d9-c4b2-49d5-88d8-6616c9e0a1cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.851409] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1372.851409] env[68638]: value = "task-2834686" [ 1372.851409] env[68638]: _type = "Task" [ 1372.851409] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.858817] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834686, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.361211] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834686, 'name': Rename_Task, 'duration_secs': 0.137076} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.361513] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1373.361703] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fa9ab47-a556-4e19-8dae-5905742e20ab {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.367593] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1373.367593] env[68638]: value = "task-2834687" [ 1373.367593] env[68638]: _type = "Task" [ 1373.367593] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.374565] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834687, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.877379] env[68638]: DEBUG oslo_vmware.api [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834687, 'name': PowerOnVM_Task, 'duration_secs': 0.427305} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.877661] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1373.877852] env[68638]: INFO nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 6.58 seconds to spawn the instance on the hypervisor. [ 1373.878037] env[68638]: DEBUG nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1373.878780] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0608a4-304d-4bfb-af3d-45fb58bb4655 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.397361] env[68638]: INFO nova.compute.manager [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 11.25 seconds to build instance. [ 1374.899138] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6601c616-bb17-485f-87c7-d2f6bbec701f tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.761s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.424287] env[68638]: DEBUG nova.compute.manager [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Received event network-changed-2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1375.424436] env[68638]: DEBUG nova.compute.manager [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Refreshing instance network info cache due to event network-changed-2a6adb50-4ed4-4484-a4c6-2272e30f226a. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1375.424656] env[68638]: DEBUG oslo_concurrency.lockutils [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.424803] env[68638]: DEBUG oslo_concurrency.lockutils [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1375.424965] env[68638]: DEBUG nova.network.neutron [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Refreshing network info cache for port 2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1376.115673] env[68638]: DEBUG nova.network.neutron [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updated VIF entry in instance network info cache for port 2a6adb50-4ed4-4484-a4c6-2272e30f226a. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.116046] env[68638]: DEBUG nova.network.neutron [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.619022] env[68638]: DEBUG oslo_concurrency.lockutils [req-0b5ac62e-786f-40d3-bb66-0f9ac50e0e06 req-68b5f3f2-80a4-4e4d-a9a4-5e33271c5a81 service nova] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.027896] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic 
task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.656068] env[68638]: DEBUG nova.compute.manager [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1395.177227] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.177527] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.682749] env[68638]: INFO nova.compute.claims [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.188963] env[68638]: INFO nova.compute.resource_tracker [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating resource usage from migration f8793f1f-cd5a-4e92-88e8-48e2e5869d36 [ 1396.238113] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df729c5a-a32c-4930-96b0-009889f6588b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.245962] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57449542-9702-44ae-aa75-987ce61a8371 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.274852] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40546d37-0d32-4fcb-956c-f81dcf35a9b6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.281560] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c46e47-5cfc-4d6c-80ec-e3b691000fdd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.294903] env[68638]: DEBUG nova.compute.provider_tree [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.797562] env[68638]: DEBUG nova.scheduler.client.report [None 
req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.302548] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.125s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.302865] env[68638]: INFO nova.compute.manager [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Migrating [ 1397.818606] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.818935] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.818974] env[68638]: DEBUG nova.network.neutron [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1398.508031] env[68638]: DEBUG nova.network.neutron [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.010956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.526362] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb19ae76-8b79-4ff3-a194-5eb35435b5c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.544842] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1401.052187] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.052517] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-437527d5-0fb2-4e41-9c04-5c3eb636ae12 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.059607] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1401.059607] env[68638]: value = "task-2834688" [ 1401.059607] env[68638]: _type = "Task" [ 1401.059607] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.067425] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834688, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.569302] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834688, 'name': PowerOffVM_Task, 'duration_secs': 0.152589} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.569680] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1401.569813] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1402.076256] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1402.076526] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1402.076678] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1402.076864] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1402.077021] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1402.077172] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1402.077389] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 
tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1402.077610] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1402.077797] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1402.077968] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1402.078166] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1402.083358] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cf824d4-3e53-4527-a9e4-d898b21c9bc8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.101447] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1402.101447] env[68638]: value = "task-2834689" [ 1402.101447] env[68638]: _type = "Task" [ 1402.101447] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.109466] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834689, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.610606] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834689, 'name': ReconfigVM_Task, 'duration_secs': 0.152747} completed successfully. 
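The hardware.py lines above walk from a 1-vCPU flavor with 65536/65536/65536 limits to a single candidate topology. A simplified sketch of that enumeration (not the actual Nova code) makes the result obvious:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) factorizations of the vCPU
        # count within the limits; for vcpus=1 only (1, 1, 1) qualifies.
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)]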
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.610952] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1403.116965] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1403.117527] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1403.117759] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1403.117992] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1403.118186] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1403.118345] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1403.118556] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1403.118733] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c 
tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1403.118907] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1403.119088] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1403.119269] env[68638]: DEBUG nova.virt.hardware [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1403.124665] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1403.124962] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a5e3cde-18a3-496b-b1fc-c532cfd0af15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.143178] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1403.143178] env[68638]: value = "task-2834690" [ 1403.143178] env[68638]: _type = "Task" [ 1403.143178] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.150455] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834690, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.653284] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834690, 'name': ReconfigVM_Task, 'duration_secs': 0.153363} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.653609] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1403.654314] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adcd9be-ecdd-45df-bc51-f23a3641cf1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.675734] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1403.675970] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78587e2d-0df5-408d-9ea4-4dbbee173c29 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.693424] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1403.693424] env[68638]: value = "task-2834691" [ 1403.693424] env[68638]: _type = "Task" [ 1403.693424] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.700726] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834691, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.203240] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834691, 'name': ReconfigVM_Task, 'duration_secs': 0.24035} completed successfully. 
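The detach and attach records above are both ReconfigVM_Task calls carrying a device-change spec. A rough sketch of the shape of such a call through oslo.vmware (illustrative only, not the nova.virt.vmwareapi.volumeops implementation; disk_device is assumed to be an existing VirtualDisk object already looked up on the VM):

    def detach_disk(session, vm_ref, disk_device):
        factory = session.vim.client.factory
        config_spec = factory.create('ns0:VirtualMachineConfigSpec')
        device_spec = factory.create('ns0:VirtualDeviceConfigSpec')
        device_spec.operation = 'remove'          # 'add' for the attach case
        device_spec.device = disk_device
        config_spec.deviceChange = [device_spec]
        # ReconfigVM_Task returns a Task moref; wait_for_task polls it,
        # producing the progress/completed records seen above.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)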
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.203534] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.203796] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1404.710425] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f7158-a827-4b11-a3e0-3f19735c919d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.730874] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1337b3f6-89a0-4a98-bf52-51680349f99a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.747654] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1405.285501] env[68638]: DEBUG nova.network.neutron [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Port 32c36f62-8c15-481f-9b9e-613eafeaeae8 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1406.307439] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1406.307792] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1406.307842] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 
tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.340943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.341292] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1407.341362] env[68638]: DEBUG nova.network.neutron [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1408.025877] env[68638]: DEBUG nova.network.neutron [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.529206] env[68638]: DEBUG oslo_concurrency.lockutils [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1409.055541] env[68638]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb2508f-036d-4e90-a493-5557cb7cc0b0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.075822] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1f9efe-e082-40a9-9004-888f68ba1571 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.082340] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.588139] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1409.588512] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7bfe17e-151b-4aa3-bb2a-d3f80c8de933 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.595779] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1409.595779] env[68638]: value = "task-2834692" [ 1409.595779] env[68638]: _type = "Task" [ 1409.595779] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.603576] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834692, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.106164] env[68638]: DEBUG oslo_vmware.api [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834692, 'name': PowerOnVM_Task, 'duration_secs': 0.347173} completed successfully. 
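Every power-on and power-off in this section follows the same oslo.vmware call shape: invoke the VM task method, then poll the returned task with wait_for_task (the "progress is 0%" lines). A minimal sketch, with placeholder connection details:

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns a Task managed-object reference;
        # wait_for_task() polls it until success or raises on error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

    # session = vmware_api.VMwareAPISession('vc.example.test', 'user',
    #                                       'secret', api_retry_count=10,
    #                                       task_poll_interval=0.5)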
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.106441] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.106625] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-96ce5baf-8d89-4dc5-83c1-6931ee7e267c tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance '3db8e8d5-d427-426b-b2c7-ceeb6cf866d0' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1411.696261] env[68638]: DEBUG nova.compute.manager [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1411.697166] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9f48aa-1129-4aa8-a260-327d51d0d25f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.207047] env[68638]: INFO nova.compute.manager [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] instance snapshotting [ 1412.207608] env[68638]: DEBUG nova.objects.instance [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1412.713400] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bc0e66-b54c-4f5e-b10a-455857ba7c25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.732194] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27e6c4e-66ab-48f9-8809-d9146d5c36ac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.947519] env[68638]: DEBUG nova.network.neutron [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Port 32c36f62-8c15-481f-9b9e-613eafeaeae8 binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1412.947790] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.947944] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1412.948121] env[68638]: DEBUG nova.network.neutron [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1413.242566] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1413.242924] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c2660355-1cdd-4887-a98f-66db91d741a0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.250520] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1413.250520] env[68638]: value = "task-2834693" [ 1413.250520] env[68638]: _type = "Task" [ 1413.250520] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.258807] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834693, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.650607] env[68638]: DEBUG nova.network.neutron [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.760319] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834693, 'name': CreateSnapshot_Task, 'duration_secs': 0.435504} completed successfully. 
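The snapshot above is a single CreateSnapshot_Task call followed by the usual task poll. A hedged sketch of that call; the description, memory, and quiesce values here are illustrative, not necessarily what the driver passes:

    def snapshot_vm(session, vm_ref, name):
        task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                                  name=name, description='nova snapshot',
                                  memory=False, quiesce=False)
        # Returns the task info once the snapshot exists.
        return session.wait_for_task(task)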
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.760655] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1413.761385] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632dd6b6-46f0-4a52-96a5-b9f04c8faa4d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.153756] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.277598] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1414.277897] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f0868282-286e-4aa2-a587-138039355393 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.286467] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1414.286467] env[68638]: value = "task-2834694" [ 1414.286467] env[68638]: _type = "Task" [ 1414.286467] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.293993] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834694, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.656891] env[68638]: DEBUG nova.compute.manager [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68638) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1414.657200] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.657482] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.798827] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834694, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.160617] env[68638]: DEBUG nova.objects.instance [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'migration_context' on Instance uuid 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.296821] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834694, 'name': CloneVM_Task, 'duration_secs': 0.916477} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.297121] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created linked-clone VM from snapshot [ 1415.297865] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8ee964-b129-46e9-acb5-c1088ccc97de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.305013] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploading image f08ceddd-c61f-455e-9d9a-7b845f2c1218 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1415.324620] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1415.324620] env[68638]: value = "vm-570071" [ 1415.324620] env[68638]: _type = "VirtualMachine" [ 1415.324620] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1415.324904] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a1fdbd50-6cc8-4bad-be60-c65d3062c3d3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.330817] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease: (returnval){ [ 1415.330817] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e17362-edbb-80ff-94bc-928914162d0f" [ 1415.330817] env[68638]: _type = "HttpNfcLease" [ 1415.330817] env[68638]: } obtained for exporting VM: (result){ [ 1415.330817] env[68638]: value = "vm-570071" [ 1415.330817] env[68638]: _type = "VirtualMachine" [ 1415.330817] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1415.331042] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the lease: (returnval){ [ 1415.331042] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e17362-edbb-80ff-94bc-928914162d0f" [ 1415.331042] env[68638]: _type = "HttpNfcLease" [ 1415.331042] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1415.336675] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1415.336675] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e17362-edbb-80ff-94bc-928914162d0f" [ 1415.336675] env[68638]: _type = "HttpNfcLease" [ 1415.336675] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1415.714798] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7502e5e-e4e1-42c7-bb68-4cfb4b22f6e9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.722324] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868923f2-3b57-40de-b599-66144ee59be2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.751798] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3999d40f-cf69-4c58-904e-91ac2ae9597b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.758809] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228af4b9-abfd-4de3-a1bf-59460c3e6129 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.771375] env[68638]: DEBUG nova.compute.provider_tree [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.838757] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1415.838757] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e17362-edbb-80ff-94bc-928914162d0f" [ 1415.838757] env[68638]: _type = "HttpNfcLease" [ 1415.838757] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1415.839162] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1415.839162] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52e17362-edbb-80ff-94bc-928914162d0f" [ 1415.839162] env[68638]: _type = "HttpNfcLease" [ 1415.839162] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1415.839906] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c874101-cff2-44da-960e-0360ed7b8536 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.848700] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk from lease info. 
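Once the HttpNfcLease is ready, the exported disk is just an HTTPS stream at the VMDK URL found above. Nova's rw_handles wrap this with chunked reads and periodic HttpNfcLeaseProgress keep-alives; a bare-bones sketch of the read side, with placeholder names and no cookie handling:

    import requests

    def read_exported_vmdk(url, out_path, verify=True):
        with requests.get(url, stream=True, verify=verify, timeout=300) as resp:
            resp.raise_for_status()
            with open(out_path, 'wb') as out:
                for chunk in resp.iter_content(chunk_size=64 * 1024):
                    out.write(chunk)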
{{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1415.848906] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1415.943665] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-91a0adc5-9e07-4b3b-918b-57458c807a7b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.274384] env[68638]: DEBUG nova.scheduler.client.report [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1417.289532] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.632s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.339525] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1418.832241] env[68638]: INFO nova.compute.manager [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Swapping old allocation on dict_keys(['a03d7c1f-9953-43da-98b9-91e5cea1f9ff']) held by migration f8793f1f-cd5a-4e92-88e8-48e2e5869d36 for instance [ 1418.854686] env[68638]: DEBUG nova.scheduler.client.report [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Overwriting current allocation {'allocations': {'a03d7c1f-9953-43da-98b9-91e5cea1f9ff': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 180}}, 'project_id': 'c238a05699ee42f9a3d69c16f0777ae9', 'user_id': '0fb22fd94276463ebb001ec679a36fec', 'consumer_generation': 1} on consumer 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 {{(pid=68638) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1418.948513] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring 
lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.948711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1418.948893] env[68638]: DEBUG nova.network.neutron [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.665628] env[68638]: DEBUG nova.network.neutron [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [{"id": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "address": "fa:16:3e:2e:17:47", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32c36f62-8c", "ovs_interfaceid": "32c36f62-8c15-481f-9b9e-613eafeaeae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.168652] env[68638]: DEBUG oslo_concurrency.lockutils [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1420.168652] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1420.168996] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-764c4e15-f872-4a72-b490-e1e1173dc0e3 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.176060] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1420.176060] env[68638]: value = "task-2834696" [ 1420.176060] env[68638]: _type = "Task" [ 1420.176060] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.184338] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.340695] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.686669] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834696, 'name': PowerOffVM_Task, 'duration_secs': 0.234578} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.687108] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1420.687700] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1420.687954] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1420.688166] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1420.688387] env[68638]: DEBUG nova.virt.hardware [None 
req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1420.688571] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1420.688751] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1420.688987] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1420.689201] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1420.689402] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1420.689594] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1420.689807] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1420.695239] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6abc8228-1f97-47d9-965e-49fda22ee400 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.713841] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1420.713841] env[68638]: value = "task-2834697" [ 1420.713841] env[68638]: _type = "Task" [ 1420.713841] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.723547] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834697, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.224089] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834697, 'name': ReconfigVM_Task, 'duration_secs': 0.13484} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.224926] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b27ec1-45c3-44c7-82a3-23b6401b420d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.243273] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1421.243538] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.243691] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1421.243878] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.244043] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1421.244201] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1421.244417] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1421.244572] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1421.244742] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1421.244905] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1421.245164] env[68638]: DEBUG nova.virt.hardware [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1421.245981] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7349aa-19fc-4d76-b9b2-c43e216833d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.252256] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1421.252256] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521fd8ea-12f3-274e-bb2c-18f735eac36b" [ 1421.252256] env[68638]: _type = "Task" [ 1421.252256] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.260252] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521fd8ea-12f3-274e-bb2c-18f735eac36b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.341011] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.762405] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521fd8ea-12f3-274e-bb2c-18f735eac36b, 'name': SearchDatastore_Task, 'duration_secs': 0.013796} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.767788] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfiguring VM instance instance-0000007c to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1421.768089] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56953a8f-19f3-4e2e-a86e-7fb83c471044 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.786150] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1421.786150] env[68638]: value = "task-2834698" [ 1421.786150] env[68638]: _type = "Task" [ 1421.786150] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.794188] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834698, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.297905] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834698, 'name': ReconfigVM_Task, 'duration_secs': 0.36206} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.298232] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfigured VM instance instance-0000007c to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1422.299170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855fd48c-66b5-4acf-bfaf-945eaeee3700 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.328014] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.328401] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c037a0ef-fc38-46e8-8934-ce9b0292f1be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.345905] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1422.346198] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1422.352206] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1422.352206] env[68638]: value = "task-2834699" [ 1422.352206] env[68638]: _type = "Task" [ 1422.352206] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.362378] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834699, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.861960] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834699, 'name': ReconfigVM_Task, 'duration_secs': 0.272417} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.862371] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0/3db8e8d5-d427-426b-b2c7-ceeb6cf866d0.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.863110] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408c00a7-fc82-4543-ad98-24c7dc9f83ff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.881444] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63459966-e65f-4783-8d1e-f97113f86562 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.900610] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8e4d9f-1463-4983-9537-c47743e8b998 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.918963] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4196c609-19ec-4507-9d57-ba4c0c9930c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.926141] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1422.926409] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3466b80-14ae-4a85-9b78-44f27d94b928 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.932828] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1422.932828] env[68638]: value = "task-2834700" [ 1422.932828] env[68638]: _type = "Task" [ 1422.932828] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.940372] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834700, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.094068] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1423.095386] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b668c0-9761-4a13-b834-6a281d2b4e41 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.101406] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1423.101567] env[68638]: ERROR oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk due to incomplete transfer. [ 1423.101769] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-daf99a5b-e002-4652-90cc-281c5acfd18c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.109322] env[68638]: DEBUG oslo_vmware.rw_handles [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ec9dcc-0d24-04b5-7cc5-28fab2b6f77c/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1423.109517] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploaded image f08ceddd-c61f-455e-9d9a-7b845f2c1218 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1423.111766] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1423.112035] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7454ad30-4508-4afc-ba04-aeac07e0e8dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.117535] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1423.117535] env[68638]: value = "task-2834701" [ 1423.117535] env[68638]: _type = "Task" [ 1423.117535] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.124890] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834701, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.341160] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1423.443133] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834700, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.627163] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834701, 'name': Destroy_Task, 'duration_secs': 0.361063} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.627442] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroyed the VM [ 1423.627684] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1423.627933] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7d7a0755-e152-496e-bce5-ee245b20fa1d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.633751] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1423.633751] env[68638]: value = "task-2834702" [ 1423.633751] env[68638]: _type = "Task" [ 1423.633751] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.640815] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834702, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.943697] env[68638]: DEBUG oslo_vmware.api [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834700, 'name': PowerOnVM_Task, 'duration_secs': 1.00321} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.944147] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.143124] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834702, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.340505] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.644490] env[68638]: DEBUG oslo_vmware.api [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834702, 'name': RemoveSnapshot_Task, 'duration_secs': 0.715455} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.644834] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1424.645078] env[68638]: INFO nova.compute.manager [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 11.93 seconds to snapshot the instance on the hypervisor. 
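The entries above and below repeatedly trace the same oslo.vmware pattern: a VIM task method (PowerOffVM_Task, ReconfigVM_Task, PowerOnVM_Task, CreateSnapshot_Task, CloneVM_Task, RemoveSnapshot_Task) is invoked, then wait_for_task polls it, producing the "progress is N%" and "completed successfully" DEBUG lines. A minimal sketch of how a caller drives that pattern with oslo.vmware follows; the host, credentials, poll interval and vm_ref are illustrative placeholders, not values taken from this log, and the snippet is an assumption-laden sketch rather than the driver's actual code path.

    # Sketch only: the invoke_api + wait_for_task polling pattern visible in the
    # surrounding DEBUG output. All connection values are placeholders.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',   # vCenter host (placeholder)
        'svc-nova',               # server_username (placeholder)
        'secret',                 # server_password (placeholder)
        10,                       # api_retry_count
        0.5)                      # task_poll_interval: seconds between poll cycles

    def power_off(vm_ref):
        # Start the VIM task; this corresponds to the
        # "Invoking VirtualMachine.PowerOffVM_Task" lines above.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task drives the "Task: {...} progress is N%" polling lines
        # and returns the task info once the task succeeds (raises on error).
        return session.wait_for_task(task_ref)
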
[ 1424.954376] env[68638]: INFO nova.compute.manager [None req-5b772ed5-bb3f-4ee0-9404-941e13af09fd tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance to original state: 'active' [ 1425.180172] env[68638]: DEBUG nova.compute.manager [None req-1bb01f29-9b06-48d4-8e10-e8a65148ed24 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Found 1 images (rotation: 2) {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1426.335886] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1426.340561] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1426.340712] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1426.489597] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.489799] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.490062] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1426.490259] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1426.490433] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock 
"3db8e8d5-d427-426b-b2c7-ceeb6cf866d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1426.492497] env[68638]: INFO nova.compute.manager [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Terminating instance [ 1426.545388] env[68638]: DEBUG nova.compute.manager [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1426.546687] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bed7547-d8d5-4157-b998-c00ddd7742ea {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.850036] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] There are 28 instances to clean {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1426.850187] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 43b466d6-03fb-49d5-9404-f0009199fbe1] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1426.995779] env[68638]: DEBUG nova.compute.manager [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1426.996012] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1426.996893] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c48729-ee0e-49a3-857f-5a6a1404c688 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.004446] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1427.004666] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccbc7e0c-bd7f-453f-ae3a-b15d2ba0d862 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.010387] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1427.010387] env[68638]: value = "task-2834703" [ 1427.010387] env[68638]: _type = "Task" [ 1427.010387] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.017471] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.056653] env[68638]: INFO nova.compute.manager [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] instance snapshotting [ 1427.057267] env[68638]: DEBUG nova.objects.instance [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.353218] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 230f1a80-cf88-41c1-984f-d687932461d7] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1427.519714] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834703, 'name': PowerOffVM_Task, 'duration_secs': 0.184144} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.520040] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1427.520239] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1427.520485] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-754ffd27-ff5f-41ee-bf43-c8703df16a62 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.563640] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f46de8-b15b-41bb-ac9d-f6441d67d71b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.585660] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfc3b40-a57f-4f1a-809b-687046d5f475 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.588287] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1427.588480] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1427.588656] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleting the datastore file [datastore2] 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1427.588894] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45bab04e-e153-4091-9a82-03d379f9043e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.597942] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1427.597942] env[68638]: value = "task-2834705" [ 1427.597942] env[68638]: _type = "Task" [ 1427.597942] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.605164] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834705, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.856790] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e6bb1034-e440-4fb2-ba56-a734c4f67cdb] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1428.099629] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1428.100789] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4b7f262c-6f4e-4bc8-a7bf-12d877026bf4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.110820] env[68638]: DEBUG oslo_vmware.api [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834705, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135708} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.111884] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1428.112084] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1428.112268] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1428.112441] env[68638]: INFO nova.compute.manager [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1428.112674] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1428.112922] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1428.112922] env[68638]: value = "task-2834706" [ 1428.112922] env[68638]: _type = "Task" [ 1428.112922] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.113115] env[68638]: DEBUG nova.compute.manager [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1428.113212] env[68638]: DEBUG nova.network.neutron [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1428.121747] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834706, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.360036] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4db12faa-4c35-42ae-add5-19372e1d8807] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1428.535725] env[68638]: DEBUG nova.compute.manager [req-00bb1991-2104-4390-a170-cf805608ee06 req-9e240917-e67b-4ebc-aabe-2bbfb6f49fd7 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Received event network-vif-deleted-32c36f62-8c15-481f-9b9e-613eafeaeae8 {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1428.535931] env[68638]: INFO nova.compute.manager [req-00bb1991-2104-4390-a170-cf805608ee06 req-9e240917-e67b-4ebc-aabe-2bbfb6f49fd7 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Neutron deleted interface 32c36f62-8c15-481f-9b9e-613eafeaeae8; detaching it from the instance and deleting it from the info cache [ 1428.536237] env[68638]: DEBUG nova.network.neutron [req-00bb1991-2104-4390-a170-cf805608ee06 req-9e240917-e67b-4ebc-aabe-2bbfb6f49fd7 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.623919] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834706, 'name': CreateSnapshot_Task, 'duration_secs': 0.415336} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.624221] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1428.624942] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b47dd11-c216-4309-8946-7725e1d1feb0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.863182] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2452dd7a-5f16-4094-9407-59405eed572b] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1429.019471] env[68638]: DEBUG nova.network.neutron [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.038294] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0c79505-5b72-4fdb-b53b-73f3e8e739ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.048089] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e00ff4-6518-4183-b876-ddea4871777d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.073206] env[68638]: DEBUG nova.compute.manager [req-00bb1991-2104-4390-a170-cf805608ee06 req-9e240917-e67b-4ebc-aabe-2bbfb6f49fd7 service nova] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Detach interface failed, port_id=32c36f62-8c15-481f-9b9e-613eafeaeae8, reason: Instance 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1429.142512] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1429.142809] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7b59d94c-2cc9-4a17-8e48-004efb2b5b46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.150858] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1429.150858] env[68638]: value = "task-2834707" [ 1429.150858] env[68638]: _type = "Task" [ 1429.150858] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.159321] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834707, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.367017] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: d7fd30c6-3e0b-4564-9369-f29dc59a4d74] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1429.521934] env[68638]: INFO nova.compute.manager [-] [instance: 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0] Took 1.41 seconds to deallocate network for instance. [ 1429.661442] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834707, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.870449] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e2e74700-aa83-484a-a61f-9f98a6019fdb] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1430.027974] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1430.028366] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1430.028565] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.081026] env[68638]: INFO nova.scheduler.client.report [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted allocations for instance 3db8e8d5-d427-426b-b2c7-ceeb6cf866d0 [ 1430.161183] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834707, 'name': CloneVM_Task, 'duration_secs': 0.888346} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.161451] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created linked-clone VM from snapshot [ 1430.162188] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095dba19-b2a5-4391-877d-37b644fdffd7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.168965] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploading image bc377c69-c88b-458a-b32c-46866ede52e8 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1430.188110] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1430.188110] env[68638]: value = "vm-570073" [ 1430.188110] env[68638]: _type = "VirtualMachine" [ 1430.188110] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1430.188568] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c8285f42-6349-4880-9368-25c0d63743db {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.195965] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease: (returnval){ [ 1430.195965] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2949-f763-b011-fb5d-16abc1968c73" [ 1430.195965] env[68638]: _type = "HttpNfcLease" [ 1430.195965] env[68638]: } obtained for exporting VM: (result){ [ 1430.195965] env[68638]: value = "vm-570073" [ 1430.195965] env[68638]: _type = "VirtualMachine" [ 1430.195965] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1430.196241] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the lease: (returnval){ [ 1430.196241] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2949-f763-b011-fb5d-16abc1968c73" [ 1430.196241] env[68638]: _type = "HttpNfcLease" [ 1430.196241] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1430.201772] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1430.201772] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2949-f763-b011-fb5d-16abc1968c73" [ 1430.201772] env[68638]: _type = "HttpNfcLease" [ 1430.201772] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1430.373994] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2e788c4c-f6d1-4001-9389-1068887d205f] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1430.588038] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2acfd38b-f7d8-488e-9079-4c64270a5810 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "3db8e8d5-d427-426b-b2c7-ceeb6cf866d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.098s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.703786] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1430.703786] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2949-f763-b011-fb5d-16abc1968c73" [ 1430.703786] env[68638]: _type = "HttpNfcLease" [ 1430.703786] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1430.704097] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1430.704097] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ac2949-f763-b011-fb5d-16abc1968c73" [ 1430.704097] env[68638]: _type = "HttpNfcLease" [ 1430.704097] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1430.704792] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35938262-ede9-4162-ac67-3f899b3cfa51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.711522] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1430.711690] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk for reading. 
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1430.800015] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-127cf6ba-66bc-4b8d-8301-cef500856b1b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.877280] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 00a22fef-5d10-4413-a9aa-070a1a863cdd] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1431.380247] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 3c5b81aa-5fa7-4b9b-b346-fce1c6ba28eb] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1431.422271] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "7855131e-d65c-40c3-8566-86a80ba8e5db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.422599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.884275] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 25c35c36-71c9-48cd-b7e4-6293eef890e5] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1431.925808] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Starting instance... 
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1432.388196] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4ae5ab72-419c-47a7-b34e-a43d3fbfb0a4] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1432.451599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.451988] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.453934] env[68638]: INFO nova.compute.claims [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.892060] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 4a0c0188-69bb-441e-a930-ab20be5b2319] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1433.395285] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: f9bd4416-b2c3-4bdd-9066-08935d304765] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1433.504301] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bda53e-c801-4942-a041-c8dd2fc6c495 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.512081] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecfc276-844b-402d-8de9-4d2b87f9aa65 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.541579] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edb2976-be67-41d2-a33d-a80ed41b379b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.549073] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b54ee3-5144-4940-8739-ccf91a33e91d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.562662] env[68638]: DEBUG nova.compute.provider_tree [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.898477] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ba07529b-e6d0-4c22-b938-c4908a7eafd7] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1434.066298] env[68638]: DEBUG nova.scheduler.client.report [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.401449] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: e0903192-4fa7-437a-9023-33e8e65124e3] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1434.571855] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.572406] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1434.904615] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 0bfd41b5-e712-4bfb-94b3-5ecf84a2f9f1] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1435.079373] env[68638]: DEBUG nova.compute.utils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1435.080856] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1435.081553] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1435.147469] env[68638]: DEBUG nova.policy [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fb22fd94276463ebb001ec679a36fec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c238a05699ee42f9a3d69c16f0777ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1435.407860] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: a576ba6f-5e3b-4408-b95d-2084a072ec12] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1435.433761] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Successfully created port: 5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.585746] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Start building block device mappings for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1435.911328] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: c66805eb-fd97-4fe3-984d-8759f227d7fc] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1436.414813] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 58e71a29-5cf3-4ffa-a4a8-a414aac1e8f9] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1436.596272] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1436.620458] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1436.620729] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.620909] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1436.621124] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.621275] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1436.621421] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1436.621652] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1436.621817] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1436.621986] env[68638]: DEBUG nova.virt.hardware [None 
req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1436.622164] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1436.622382] env[68638]: DEBUG nova.virt.hardware [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1436.623897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3719d4-a7fb-4cb5-8160-0c495b1ac20b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.632333] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7717cfa-f51f-440d-9d5a-0090435613ad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.918336] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: ace44b04-6dcf-4845-af4e-b28ddeebe60e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1437.036175] env[68638]: DEBUG nova.compute.manager [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Received event network-vif-plugged-5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1437.036395] env[68638]: DEBUG oslo_concurrency.lockutils [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] Acquiring lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.036678] env[68638]: DEBUG oslo_concurrency.lockutils [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.036870] env[68638]: DEBUG oslo_concurrency.lockutils [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.037183] env[68638]: DEBUG nova.compute.manager [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] [instance: 
7855131e-d65c-40c3-8566-86a80ba8e5db] No waiting events found dispatching network-vif-plugged-5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1437.037410] env[68638]: WARNING nova.compute.manager [req-687520c0-575b-4e80-a2fd-4496904e1dea req-9e6ea955-60ce-4942-a6e9-4880ab097319 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Received unexpected event network-vif-plugged-5aa25b45-f4df-4cce-af91-452fa7969cbb for instance with vm_state building and task_state spawning. [ 1437.127921] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Successfully updated port: 5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1437.422291] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: cc2e9758-45ee-4e94-ad74-ba7d6c85f06d] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1437.630983] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.631211] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1437.631350] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.926986] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 7d99d946-f2df-4d31-911f-ac479849b901] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1437.966835] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1437.967795] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce078d7-d556-430d-95a8-b8949cd5d06d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.975921] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1437.976109] env[68638]: ERROR oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk due to incomplete transfer. [ 1437.976331] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-89930cf1-f467-4766-a87b-ab0b6eff7e3a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.982853] env[68638]: DEBUG oslo_vmware.rw_handles [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52eb85bb-ab3b-ca03-4877-97b84ba122be/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1437.983080] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploaded image bc377c69-c88b-458a-b32c-46866ede52e8 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1437.985379] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1437.985611] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b8b72e8c-6d0e-49a9-8979-bcf7a02658d4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.991433] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1437.991433] env[68638]: value = "task-2834709" [ 1437.991433] env[68638]: _type = "Task" [ 1437.991433] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.998812] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834709, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.171205] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Instance cache missing network info. {{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1438.297825] env[68638]: DEBUG nova.network.neutron [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.429053] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: df2e066d-7c71-4aec-ab9b-a339a7ff21fb] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1438.500702] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834709, 'name': Destroy_Task, 'duration_secs': 0.34478} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.500951] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroyed the VM [ 1438.501238] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1438.501495] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-baee1573-77b6-4366-aae6-1083aaba1c05 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.508230] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1438.508230] env[68638]: value = "task-2834710" [ 1438.508230] env[68638]: _type = "Task" [ 1438.508230] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.515794] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834710, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.800248] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1438.800635] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Instance network_info: |[{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1438.801175] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:d8:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5aa25b45-f4df-4cce-af91-452fa7969cbb', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1438.809894] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1438.810175] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1438.810456] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fda88f66-9e1b-4a0a-9e77-25bcbe7a3140 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.831802] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1438.831802] env[68638]: value = "task-2834711" [ 1438.831802] env[68638]: _type = "Task" [ 1438.831802] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.839343] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834711, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.932227] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 0249ffb9-82ed-44db-bb20-e619eaa176dd] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1439.018699] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834710, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.062145] env[68638]: DEBUG nova.compute.manager [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Received event network-changed-5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1439.062382] env[68638]: DEBUG nova.compute.manager [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Refreshing instance network info cache due to event network-changed-5aa25b45-f4df-4cce-af91-452fa7969cbb. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1439.062619] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] Acquiring lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.062787] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] Acquired lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.062981] env[68638]: DEBUG nova.network.neutron [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Refreshing network info cache for port 5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.341626] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834711, 'name': CreateVM_Task, 'duration_secs': 0.484886} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.341800] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.342434] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.342622] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.342943] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1439.343207] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eea3f420-f4db-427b-9bb6-67f4749f835a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.347304] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1439.347304] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52fb4f7d-030c-1e05-a11b-c6b882fde0e6" [ 1439.347304] env[68638]: _type = "Task" [ 1439.347304] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.354343] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fb4f7d-030c-1e05-a11b-c6b882fde0e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.436031] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 53e92f51-9010-4fb2-89e1-9d16a252ef6e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1439.519051] env[68638]: DEBUG oslo_vmware.api [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834710, 'name': RemoveSnapshot_Task, 'duration_secs': 0.538431} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.519242] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1439.519475] env[68638]: INFO nova.compute.manager [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 11.96 seconds to snapshot the instance on the hypervisor. [ 1439.732148] env[68638]: DEBUG nova.network.neutron [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updated VIF entry in instance network info cache for port 5aa25b45-f4df-4cce-af91-452fa7969cbb. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1439.732521] env[68638]: DEBUG nova.network.neutron [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.857753] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52fb4f7d-030c-1e05-a11b-c6b882fde0e6, 'name': SearchDatastore_Task, 'duration_secs': 0.017229} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.858079] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1439.858334] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1439.858569] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.858716] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1439.858893] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.859159] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0c90335-a4cc-4ada-bdfb-31ac91fcc307 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.866794] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.866965] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1439.867676] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f387785a-b642-4696-b22e-83742616135e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.872681] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1439.872681] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52efa4c5-3051-8e03-cec0-2f06d9b5a036" [ 1439.872681] env[68638]: _type = "Task" [ 1439.872681] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.879456] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52efa4c5-3051-8e03-cec0-2f06d9b5a036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.939096] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 2fa9b930-c76c-4cac-a371-a6b9899dc71e] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1440.056297] env[68638]: DEBUG nova.compute.manager [None req-fba61010-8c52-4b19-bc22-04db09c09ea7 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Found 2 images (rotation: 2) {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1440.235095] env[68638]: DEBUG oslo_concurrency.lockutils [req-2bd9c0e9-8bb7-45d3-a659-5df8717aa89a req-513fe33b-8ad4-4358-b16e-0edfee47ba24 service nova] Releasing lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.382395] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52efa4c5-3051-8e03-cec0-2f06d9b5a036, 'name': SearchDatastore_Task, 'duration_secs': 0.009453} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.383173] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d719cec6-ebf0-4a42-85b0-fab365cf9f75 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.387787] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1440.387787] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5297ecde-6124-8c6a-8d35-27ea08d95b04" [ 1440.387787] env[68638]: _type = "Task" [ 1440.387787] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.394669] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5297ecde-6124-8c6a-8d35-27ea08d95b04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.442237] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [instance: 423af2cc-4dea-445f-a01c-6d4d57c3f0de] Instance has had 0 of 5 cleanup attempts {{(pid=68638) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1440.899021] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5297ecde-6124-8c6a-8d35-27ea08d95b04, 'name': SearchDatastore_Task, 'duration_secs': 0.016209} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.899125] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "[datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1440.899361] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7855131e-d65c-40c3-8566-86a80ba8e5db/7855131e-d65c-40c3-8566-86a80ba8e5db.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.899621] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb656ece-7f34-48bc-b0a3-8765552c2622 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.906249] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1440.906249] env[68638]: value = "task-2834712" [ 1440.906249] env[68638]: _type = "Task" [ 1440.906249] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.914059] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834712, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.945779] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.945957] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Cleaning up deleted instances with incomplete migration {{(pid=68638) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1441.415750] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505692} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.416052] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore2] 7855131e-d65c-40c3-8566-86a80ba8e5db/7855131e-d65c-40c3-8566-86a80ba8e5db.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.416257] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.416510] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-743469ab-68a1-47da-8ddd-fb20debbe821 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.423874] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1441.423874] env[68638]: value = "task-2834713" [ 1441.423874] env[68638]: _type = "Task" [ 1441.423874] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.432045] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.448558] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.476983] env[68638]: DEBUG nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1441.477892] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fcc70b-436f-4bc8-95fc-f8d6847b8033 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.933512] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069013} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.933736] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.934494] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3455cd1c-0514-4956-9693-e7c0b6858ab2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.955891] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] 7855131e-d65c-40c3-8566-86a80ba8e5db/7855131e-d65c-40c3-8566-86a80ba8e5db.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.956210] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a24e147d-1601-4211-83da-23a4e47bf6ed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.974426] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1441.974426] env[68638]: value = "task-2834714" [ 1441.974426] env[68638]: _type = "Task" [ 1441.974426] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.981687] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834714, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.987520] env[68638]: INFO nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] instance snapshotting [ 1441.988070] env[68638]: DEBUG nova.objects.instance [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.484272] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834714, 'name': ReconfigVM_Task, 'duration_secs': 0.286104} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.484645] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Reconfigured VM instance instance-0000007e to attach disk [datastore2] 7855131e-d65c-40c3-8566-86a80ba8e5db/7855131e-d65c-40c3-8566-86a80ba8e5db.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.485246] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e98aa6fb-7523-4848-9098-901968df8e72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.493545] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1442.493545] env[68638]: value = "task-2834715" [ 1442.493545] env[68638]: _type = "Task" [ 1442.493545] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.497534] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c725b2b-50c2-40c7-bb2b-d58ca058e2f6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.504513] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834715, 'name': Rename_Task} progress is 6%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.517961] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b85ebd-9c67-4052-8a35-b59796863395 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.969922] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.003239] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834715, 'name': Rename_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.027722] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1443.027957] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-56ff2384-2a33-4d5b-b770-4f4f3080fd04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.034507] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1443.034507] env[68638]: value = "task-2834716" [ 1443.034507] env[68638]: _type = "Task" [ 1443.034507] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.043026] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834716, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.473100] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1443.473406] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.473588] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.473747] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1443.474639] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91fd2cc-0c59-447f-b4cb-a4aac48ab771 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.482804] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e615e1-2576-4389-ad9c-1467ae819612 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.498914] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8252475-44a7-4ad8-9397-a8db06801ccb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.505788] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834715, 'name': Rename_Task, 'duration_secs': 0.861918} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.507451] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.507706] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4148dfb0-582d-4078-9447-a68982a3a9d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.509677] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de033210-9c9e-4bf5-b2fa-87a114c7019a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.539165] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1443.539165] env[68638]: value = "task-2834717" [ 1443.539165] env[68638]: _type = "Task" [ 1443.539165] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.539509] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180661MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1443.539641] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1443.539840] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.552323] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834716, 'name': CreateSnapshot_Task, 'duration_secs': 0.465064} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.555325] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1443.555574] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834717, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.556568] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b870cb-ec70-42d5-9a9e-54a7a6eec076 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.054468] env[68638]: DEBUG oslo_vmware.api [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834717, 'name': PowerOnVM_Task, 'duration_secs': 0.415609} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.055368] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.055617] env[68638]: INFO nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Took 7.46 seconds to spawn the instance on the hypervisor. 
[ 1444.055803] env[68638]: DEBUG nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.056566] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc34811-913e-439c-b415-9e314f114f78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.074206] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1444.074444] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0f62fbc3-4e51-4c15-b2bd-6af118d10402 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.081888] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1444.081888] env[68638]: value = "task-2834718" [ 1444.081888] env[68638]: _type = "Task" [ 1444.081888] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.089401] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834718, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.570753] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1444.571086] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7855131e-d65c-40c3-8566-86a80ba8e5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1444.571086] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1444.571240] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1444.575971] env[68638]: INFO nova.compute.manager [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Took 12.15 seconds to build instance. [ 1444.591544] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834718, 'name': CloneVM_Task} progress is 94%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.618025] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4af67ec-b489-49ba-b0db-e7223b5ffb6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.625475] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d66793-2615-45fe-b9e0-e0502c3ea826 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.654486] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7b5746-0ae2-49f0-bf65-c74d0277599b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.661318] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90419ef6-a058-4dc5-93b1-d9d9bd49a19d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.675229] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.078431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-441ac70d-36aa-4c57-b14a-89444c1385c9 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.656s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1445.092315] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834718, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.178564] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1445.593687] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834718, 'name': CloneVM_Task, 'duration_secs': 1.173202} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.594123] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Created linked-clone VM from snapshot [ 1445.595032] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573ed634-8307-4969-82e4-ab4ecbbfca8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.603094] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploading image 8f5d2fd0-fb10-457c-9e8d-6e2e9774684a {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1445.612025] env[68638]: DEBUG nova.compute.manager [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Received event network-changed-5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1445.612217] env[68638]: DEBUG nova.compute.manager [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Refreshing instance network info cache due to event network-changed-5aa25b45-f4df-4cce-af91-452fa7969cbb. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1445.612455] env[68638]: DEBUG oslo_concurrency.lockutils [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] Acquiring lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.612596] env[68638]: DEBUG oslo_concurrency.lockutils [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] Acquired lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1445.612769] env[68638]: DEBUG nova.network.neutron [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Refreshing network info cache for port 5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.631950] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1445.631950] env[68638]: value = "vm-570076" [ 1445.631950] env[68638]: _type = "VirtualMachine" [ 1445.631950] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1445.632683] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e86a275c-18f2-43cc-97f0-8f86de64ae5a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.639991] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease: (returnval){ [ 1445.639991] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4a3b3-59c6-51fc-b155-8bda631d60ec" [ 1445.639991] env[68638]: _type = "HttpNfcLease" [ 1445.639991] env[68638]: } obtained for exporting VM: (result){ [ 1445.639991] env[68638]: value = "vm-570076" [ 1445.639991] env[68638]: _type = "VirtualMachine" [ 1445.639991] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1445.640296] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the lease: (returnval){ [ 1445.640296] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4a3b3-59c6-51fc-b155-8bda631d60ec" [ 1445.640296] env[68638]: _type = "HttpNfcLease" [ 1445.640296] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1445.647421] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1445.647421] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4a3b3-59c6-51fc-b155-8bda631d60ec" [ 1445.647421] env[68638]: _type = "HttpNfcLease" [ 1445.647421] env[68638]: } is initializing. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1445.683738] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1445.683944] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.144s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1446.148786] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1446.148786] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4a3b3-59c6-51fc-b155-8bda631d60ec" [ 1446.148786] env[68638]: _type = "HttpNfcLease" [ 1446.148786] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1446.148999] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1446.148999] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f4a3b3-59c6-51fc-b155-8bda631d60ec" [ 1446.148999] env[68638]: _type = "HttpNfcLease" [ 1446.148999] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1446.149824] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1a5c5f-8d14-4c17-9eeb-d97f00d95991 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.158769] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1446.158856] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk for reading. {{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1446.252870] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1c32f323-0f5b-4d38-8366-75e05228aa48 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.363984] env[68638]: DEBUG nova.network.neutron [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updated VIF entry in instance network info cache for port 5aa25b45-f4df-4cce-af91-452fa7969cbb. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1446.364394] env[68638]: DEBUG nova.network.neutron [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.867497] env[68638]: DEBUG oslo_concurrency.lockutils [req-baa4f429-1e35-48f7-b64d-543ad286727d req-1f49d659-a536-4d69-9733-daa495e321a6 service nova] Releasing lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1453.415823] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1453.416864] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1b7167-aeba-41e4-8e59-873ddf90fe34 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.423526] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1453.423690] env[68638]: ERROR oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk due to incomplete transfer. 
[ 1453.423900] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0e4ba770-4005-46a0-847a-086f3bda083a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.431704] env[68638]: DEBUG oslo_vmware.rw_handles [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528d39e0-83d9-be98-1256-8bea8849281c/disk-0.vmdk. {{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1453.431894] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Uploaded image 8f5d2fd0-fb10-457c-9e8d-6e2e9774684a to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1453.434133] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1453.434352] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5d4fb4ea-76f6-40ab-8562-ccf370b5eecd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.439449] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1453.439449] env[68638]: value = "task-2834720" [ 1453.439449] env[68638]: _type = "Task" [ 1453.439449] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.447828] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834720, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.949585] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834720, 'name': Destroy_Task, 'duration_secs': 0.340496} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.949871] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroyed the VM [ 1453.950157] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1453.950422] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d466d700-edcb-4c56-9573-fb88d3061768 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.956968] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1453.956968] env[68638]: value = "task-2834721" [ 1453.956968] env[68638]: _type = "Task" [ 1453.956968] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.964622] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834721, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.466952] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834721, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.968612] env[68638]: DEBUG oslo_vmware.api [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834721, 'name': RemoveSnapshot_Task, 'duration_secs': 0.621393} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.968886] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1454.969138] env[68638]: INFO nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 12.47 seconds to snapshot the instance on the hypervisor. 
[ 1455.511479] env[68638]: DEBUG nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Found 3 images (rotation: 2) {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1455.511769] env[68638]: DEBUG nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Rotating out 1 backups {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1455.511829] env[68638]: DEBUG nova.compute.manager [None req-afaaaec9-e143-49df-9a5b-ff2f5472b87e tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleting image f08ceddd-c61f-455e-9d9a-7b845f2c1218 {{(pid=68638) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1457.637801] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1457.638264] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1457.638341] env[68638]: DEBUG nova.compute.manager [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1457.639208] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976513c2-0a23-4b68-8f4e-3e53fc6d1300 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.646035] env[68638]: DEBUG nova.compute.manager [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1457.646578] env[68638]: DEBUG nova.objects.instance [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.654584] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4191b59a-ec18-421d-a681-a47b32feb0fb 
tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.655056] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89c7f3e5-a4c4-47a4-a06d-4fb634e10184 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.662404] env[68638]: DEBUG oslo_vmware.api [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1458.662404] env[68638]: value = "task-2834722" [ 1458.662404] env[68638]: _type = "Task" [ 1458.662404] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.669896] env[68638]: DEBUG oslo_vmware.api [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834722, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.171823] env[68638]: DEBUG oslo_vmware.api [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834722, 'name': PowerOffVM_Task, 'duration_secs': 0.179387} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.172172] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1459.172390] env[68638]: DEBUG nova.compute.manager [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1459.173160] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e96950-6d59-442a-8019-7c4ec9f6705e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.684422] env[68638]: DEBUG oslo_concurrency.lockutils [None req-4191b59a-ec18-421d-a681-a47b32feb0fb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1460.745410] env[68638]: DEBUG nova.compute.manager [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Stashing vm_state: stopped {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} 
[ 1461.262816] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.263128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.768575] env[68638]: INFO nova.compute.claims [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1462.274914] env[68638]: INFO nova.compute.resource_tracker [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating resource usage from migration 65437cb6-27ac-4d70-b34a-cf86fa2daafa [ 1462.324915] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acec33a7-0530-470d-8af5-4441970b8cde {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.332507] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0d95f3-791a-4752-bdee-9e5a909ca389 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.362609] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9b60cf-ade4-4cab-9369-ff3b57d0d225 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.369363] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed33b0be-d4fa-495d-b322-a5ab51837bed {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.382068] env[68638]: DEBUG nova.compute.provider_tree [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.885315] env[68638]: DEBUG nova.scheduler.client.report [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1463.392906] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.130s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1463.393148] env[68638]: INFO nova.compute.manager [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Migrating [ 1463.908966] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.909206] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1463.909370] env[68638]: DEBUG nova.network.neutron [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1464.623493] env[68638]: DEBUG nova.network.neutron [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.125993] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1466.641300] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e168b0-bc46-48a2-8675-d78082974bc5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.660706] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1467.166465] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1467.166766] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fbe4a61-fdf3-4d57-8c7c-667abf43a5c4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.174146] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1467.174146] env[68638]: value = "task-2834723" [ 1467.174146] env[68638]: _type = "Task" [ 1467.174146] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.181524] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834723, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.684355] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1467.684726] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1468.191525] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1468.191768] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1468.191938] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1468.192139] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1468.192286] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1468.192432] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1468.192643] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 
tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1468.192794] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1468.192959] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1468.193135] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1468.193310] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1468.198440] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c4cd8d6-6494-442f-a67c-3deb40d67a36 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.214313] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1468.214313] env[68638]: value = "task-2834724" [ 1468.214313] env[68638]: _type = "Task" [ 1468.214313] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.221884] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834724, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.724269] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834724, 'name': ReconfigVM_Task, 'duration_secs': 0.169859} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.724615] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1469.231301] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1469.231587] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1469.231718] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1469.231902] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1469.232058] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1469.232207] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1469.232410] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1469.232567] env[68638]: DEBUG nova.virt.hardware [None 
req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1469.232744] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1469.232912] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1469.233097] env[68638]: DEBUG nova.virt.hardware [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1469.238340] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1469.238618] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e267205d-f50f-46e5-8d2d-7530594834b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.257335] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1469.257335] env[68638]: value = "task-2834725" [ 1469.257335] env[68638]: _type = "Task" [ 1469.257335] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.267366] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.767111] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834725, 'name': ReconfigVM_Task, 'duration_secs': 0.161414} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.767474] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1469.768161] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d260e752-66a9-43f0-88f8-7a7d043ad3be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.789788] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1469.789788] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a12be11-9512-4aa5-9d03-0bb41b6017fb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.807353] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1469.807353] env[68638]: value = "task-2834726" [ 1469.807353] env[68638]: _type = "Task" [ 1469.807353] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.814536] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834726, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.317958] env[68638]: DEBUG oslo_vmware.api [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834726, 'name': ReconfigVM_Task, 'duration_secs': 0.465138} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.318249] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546/0e51f8b5-3ba0-408c-ab7a-9d29e99d7546.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1470.318553] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1470.825368] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd0301a-da22-4452-b04a-85252e23b1a9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.843525] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cc63e8-d293-4d61-8e80-d3ca678c4c4f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.860472] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1471.437096] env[68638]: DEBUG nova.network.neutron [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Port 2a6adb50-4ed4-4484-a4c6-2272e30f226a binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1472.457947] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1472.457947] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1472.458355] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 
tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.491605] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.491848] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1473.491974] env[68638]: DEBUG nova.network.neutron [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.175773] env[68638]: DEBUG nova.network.neutron [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.678540] env[68638]: DEBUG oslo_concurrency.lockutils [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1475.202576] env[68638]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49201bf-c8bd-4ba9-8116-4c551a7f4edb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.221173] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3983542-1211-4de8-9f55-76aeeef397e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.227579] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1475.733553] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-7690f2a5-9e16-4de5-8f82-fe772dd9fb78 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance '0e51f8b5-3ba0-408c-ab7a-9d29e99d7546' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1477.837979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.838496] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.838887] env[68638]: DEBUG nova.compute.manager [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Going to confirm migration 9 {{(pid=68638) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1478.375037] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.375240] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1478.375410] env[68638]: DEBUG nova.network.neutron [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 
tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1478.375583] env[68638]: DEBUG nova.objects.instance [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'info_cache' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1479.589487] env[68638]: DEBUG nova.network.neutron [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.092293] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1480.092556] env[68638]: DEBUG nova.objects.instance [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'migration_context' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.595247] env[68638]: DEBUG nova.objects.base [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Object Instance<0e51f8b5-3ba0-408c-ab7a-9d29e99d7546> lazy-loaded attributes: info_cache,migration_context {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1480.596181] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fddf184-a89a-41fc-91a6-266cf45de92a {{(pid=68638) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.615305] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c8f96c2-bd03-4902-ad74-061624424061 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.620235] env[68638]: DEBUG oslo_vmware.api [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1480.620235] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]5256c9b8-1101-7537-3f9d-73c2a1b035bf" [ 1480.620235] env[68638]: _type = "Task" [ 1480.620235] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.627282] env[68638]: DEBUG oslo_vmware.api [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5256c9b8-1101-7537-3f9d-73c2a1b035bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.130823] env[68638]: DEBUG oslo_vmware.api [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]5256c9b8-1101-7537-3f9d-73c2a1b035bf, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.131173] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.131415] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.682925] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0539b2b-608c-4cce-a3a0-fd3a15a231c0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.690623] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fafd72-bfaf-4ed9-967a-9a2dd42f24b1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.720753] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c44001-cf9a-4cbc-bd2b-9816ae53ea88 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.727493] env[68638]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427d0301-9813-4f40-bf25-7848df985c78 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.740827] env[68638]: DEBUG nova.compute.provider_tree [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.243506] env[68638]: DEBUG nova.scheduler.client.report [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1482.476794] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "7855131e-d65c-40c3-8566-86a80ba8e5db" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1482.477070] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.477261] env[68638]: DEBUG nova.compute.manager [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.478183] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e646af6f-bc0d-4e6b-b888-a4a828754d24 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.484760] env[68638]: DEBUG nova.compute.manager [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68638) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1482.485351] env[68638]: DEBUG nova.objects.instance [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] 
Lazy-loading 'flavor' on Instance uuid 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1483.053992] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.054378] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.054378] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.054532] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.054671] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1483.253128] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.121s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.253351] env[68638]: DEBUG nova.compute.manager [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=68638) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1483.341185] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.491835] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1483.492207] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0141845f-8210-47cc-8a81-4b47661d7b09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.500178] env[68638]: DEBUG oslo_vmware.api [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1483.500178] env[68638]: value = "task-2834727" [ 1483.500178] env[68638]: _type = "Task" [ 1483.500178] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.509118] env[68638]: DEBUG oslo_vmware.api [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834727, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.803966] env[68638]: INFO nova.scheduler.client.report [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted allocation for migration 65437cb6-27ac-4d70-b34a-cf86fa2daafa [ 1484.010674] env[68638]: DEBUG oslo_vmware.api [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834727, 'name': PowerOffVM_Task, 'duration_secs': 0.207879} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.010981] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1484.011226] env[68638]: DEBUG nova.compute.manager [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1484.012090] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dde1d6d-64dc-48b0-9547-78fd895a14aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.310033] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a3fa7eec-2458-4cf6-9544-1945e52790e2 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.471s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.524102] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2a580fd7-fdfc-47d1-bf33-43c4b7a8c3bf tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1484.669208] env[68638]: DEBUG nova.objects.instance [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.827149] env[68638]: DEBUG nova.objects.instance [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'flavor' on Instance uuid 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1485.173886] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.174074] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1485.174195] env[68638]: DEBUG nova.network.neutron [None 
req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.174374] env[68638]: DEBUG nova.objects.instance [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'info_cache' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1485.332501] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.332785] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1485.332830] env[68638]: DEBUG nova.network.neutron [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.333015] env[68638]: DEBUG nova.objects.instance [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'info_cache' on Instance uuid 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1485.678146] env[68638]: DEBUG nova.objects.base [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Object Instance<0e51f8b5-3ba0-408c-ab7a-9d29e99d7546> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1485.836092] env[68638]: DEBUG nova.objects.base [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Object Instance<7855131e-d65c-40c3-8566-86a80ba8e5db> lazy-loaded attributes: flavor,info_cache {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1486.342173] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1486.370627] env[68638]: DEBUG nova.network.neutron [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [{"id": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "address": "fa:16:3e:10:24:f9", 
"network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a6adb50-4e", "ovs_interfaceid": "2a6adb50-4ed4-4484-a4c6-2272e30f226a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.564321] env[68638]: DEBUG nova.network.neutron [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.873141] env[68638]: DEBUG oslo_concurrency.lockutils [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1487.067330] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock 
"refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1487.335682] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.340274] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.843931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.843931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.843931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.843931] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1487.844707] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b1de7b-d76a-4ace-bc74-d3435f010ac4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.853120] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c599ac-1d5d-4f11-bb14-597331ecc7e2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.866256] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5acd83-b371-4d95-be45-70ab1fbd11b5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.872146] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a05838-bbd0-46fc-961a-5f965a72cad5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.877635] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powering on the VM {{(pid=68638) power_on_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.900916] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b67e7275-adb3-411a-a060-b247c702bc2d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.902325] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180621MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1487.902456] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.902655] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.908914] env[68638]: DEBUG oslo_vmware.api [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1487.908914] env[68638]: value = "task-2834728" [ 1487.908914] env[68638]: _type = "Task" [ 1487.908914] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.916177] env[68638]: DEBUG oslo_vmware.api [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.072891] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1488.073230] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2088055c-f67f-4058-b535-4a8a47e2769a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.080208] env[68638]: DEBUG oslo_vmware.api [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1488.080208] env[68638]: value = "task-2834729" [ 1488.080208] env[68638]: _type = "Task" [ 1488.080208] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.088327] env[68638]: DEBUG oslo_vmware.api [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.418785] env[68638]: DEBUG oslo_vmware.api [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834728, 'name': PowerOnVM_Task, 'duration_secs': 0.39909} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.419667] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.419882] env[68638]: DEBUG nova.compute.manager [None req-a9a4995d-d2c0-4cd4-8f1c-bdcd013dcc23 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1488.420696] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec77d91c-a637-422c-b04b-ac2c93120946 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.590956] env[68638]: DEBUG oslo_vmware.api [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834729, 'name': PowerOnVM_Task, 'duration_secs': 0.40015} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.591270] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.591470] env[68638]: DEBUG nova.compute.manager [None req-c2a43780-e14a-4418-bfe8-539c46810864 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1488.592286] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d25221-2f46-4c1f-8dd4-69296b9c40f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.928405] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 7855131e-d65c-40c3-8566-86a80ba8e5db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1488.928649] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1488.928875] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1488.929052] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=960MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1488.971056] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7e4b09-f7cc-4bc3-b2d5-a70cee39a17a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.978920] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d378a57b-a740-44a3-a3e3-33c7961f5df2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.009906] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8e9bcf-d14b-4eee-a7db-4a2af231cb99 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.017173] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551b93ef-9c3c-44f6-87de-d40f31e414f7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.030233] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.230766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.231068] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1489.231297] env[68638]: DEBUG oslo_concurrency.lockutils [None 
req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.231480] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1489.231686] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.233852] env[68638]: INFO nova.compute.manager [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Terminating instance [ 1489.535576] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.540824] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989ce3f3-d2f0-4ad5-bea3-5dfd58c02315 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.547366] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Suspending the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1489.547606] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d7867457-e326-4e99-9359-22ee5a167963 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.554830] env[68638]: DEBUG oslo_vmware.api [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1489.554830] env[68638]: value = "task-2834730" [ 1489.554830] env[68638]: _type = "Task" [ 1489.554830] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.562390] env[68638]: DEBUG oslo_vmware.api [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834730, 'name': SuspendVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.737833] env[68638]: DEBUG nova.compute.manager [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1489.738122] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1489.738999] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa085a9-d0aa-4d58-876d-3348566080cb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.746763] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1489.747039] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c22596a-082d-48e8-bb5c-606287ae5add {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.753411] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1489.753411] env[68638]: value = "task-2834731" [ 1489.753411] env[68638]: _type = "Task" [ 1489.753411] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.761475] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834731, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.041796] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1490.041796] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.139s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.068124] env[68638]: DEBUG oslo_vmware.api [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834730, 'name': SuspendVM_Task} progress is 70%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.265055] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834731, 'name': PowerOffVM_Task, 'duration_secs': 0.166083} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.265055] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1490.265055] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1490.265055] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-423b1c48-cac2-4bff-bb62-ad8ec81ec7cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.324573] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1490.324807] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1490.324981] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleting the datastore file [datastore1] 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 
{{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1490.325269] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c81a689-6bc5-402f-9206-dbd64ffe1944 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.331340] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1490.331340] env[68638]: value = "task-2834733" [ 1490.331340] env[68638]: _type = "Task" [ 1490.331340] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.338622] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834733, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.565471] env[68638]: DEBUG oslo_vmware.api [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834730, 'name': SuspendVM_Task, 'duration_secs': 0.889717} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.565751] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Suspended the VM {{(pid=68638) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1490.565915] env[68638]: DEBUG nova.compute.manager [None req-5f6cd561-28ca-4a60-839e-caa6c49650f3 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1490.566698] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33af1026-a96a-47cd-a8b4-045203f6e751 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.840707] env[68638]: DEBUG oslo_vmware.api [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152693} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.840929] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.841114] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1490.841297] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1490.841468] env[68638]: INFO nova.compute.manager [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1490.841731] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1490.841925] env[68638]: DEBUG nova.compute.manager [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1490.842032] env[68638]: DEBUG nova.network.neutron [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1491.304026] env[68638]: DEBUG nova.compute.manager [req-edc5c975-1fdb-4701-97b3-065018d01a46 req-55282cef-5e3e-4321-9afa-db5d69b012ca service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Received event network-vif-deleted-2a6adb50-4ed4-4484-a4c6-2272e30f226a {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1491.304293] env[68638]: INFO nova.compute.manager [req-edc5c975-1fdb-4701-97b3-065018d01a46 req-55282cef-5e3e-4321-9afa-db5d69b012ca service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Neutron deleted interface 2a6adb50-4ed4-4484-a4c6-2272e30f226a; detaching it from the instance and deleting it from the info cache [ 1491.304559] env[68638]: DEBUG nova.network.neutron [req-edc5c975-1fdb-4701-97b3-065018d01a46 req-55282cef-5e3e-4321-9afa-db5d69b012ca service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.761954] env[68638]: DEBUG nova.network.neutron [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.807364] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e06c7ff-00a2-43d3-8ed1-f82a7b84bf5a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.817332] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3335f4-0210-4e44-b3e0-f8ec3967a795 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.842388] env[68638]: DEBUG nova.compute.manager [req-edc5c975-1fdb-4701-97b3-065018d01a46 req-55282cef-5e3e-4321-9afa-db5d69b012ca service nova] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Detach interface failed, port_id=2a6adb50-4ed4-4484-a4c6-2272e30f226a, reason: Instance 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 could not be found. 
{{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1491.880028] env[68638]: INFO nova.compute.manager [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Resuming [ 1491.880699] env[68638]: DEBUG nova.objects.instance [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'flavor' on Instance uuid 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1492.265092] env[68638]: INFO nova.compute.manager [-] [instance: 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546] Took 1.42 seconds to deallocate network for instance. [ 1492.771431] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1492.771819] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1492.771999] env[68638]: DEBUG nova.objects.instance [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'resources' on Instance uuid 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.316024] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9743a273-5bff-43b7-9b4c-a74d13811a31 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.322581] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8299c269-3ef0-4758-a2f4-d341cfe493aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.352258] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f365f4ab-5e3a-4823-905d-fc75dc376291 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.358972] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750e1d5c-ef6d-4523-93dd-aa17383376d2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.372333] env[68638]: DEBUG nova.compute.provider_tree [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1493.390268] env[68638]: DEBUG oslo_concurrency.lockutils [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.390458] env[68638]: DEBUG oslo_concurrency.lockutils [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquired lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1493.390622] env[68638]: DEBUG nova.network.neutron [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1493.893360] env[68638]: ERROR nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [req-d73e74c5-17dd-4285-9b2d-8edb00423bcf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d73e74c5-17dd-4285-9b2d-8edb00423bcf"}]} [ 1493.911497] env[68638]: DEBUG nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1493.924388] env[68638]: DEBUG nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1493.924617] env[68638]: DEBUG nova.compute.provider_tree [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1493.937147] env[68638]: DEBUG nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1493.953860] env[68638]: DEBUG nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1493.989310] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366b00b8-1f01-45e0-a3c8-a7e995105948 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.996460] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f9cdb8a8-2e46-477a-8440-f72e562de5d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.029252] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c27b71c-49f0-4d6e-b6ac-c1b57a9b0ad8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.036201] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddad7d2-8878-4c56-87d4-e685b7ec6e9e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.049044] env[68638]: DEBUG nova.compute.provider_tree [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1494.115095] env[68638]: DEBUG nova.network.neutron [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [{"id": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "address": "fa:16:3e:5c:d8:13", "network": {"id": "f58789bc-a36f-4092-a0bc-10fa06a6e566", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-264515557-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c238a05699ee42f9a3d69c16f0777ae9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5aa25b45-f4", "ovs_interfaceid": "5aa25b45-f4df-4cce-af91-452fa7969cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.578064] env[68638]: DEBUG nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1494.578346] env[68638]: DEBUG nova.compute.provider_tree [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 185 to 186 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1494.578546] env[68638]: DEBUG nova.compute.provider_tree [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1494.617823] env[68638]: DEBUG oslo_concurrency.lockutils [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Releasing lock "refresh_cache-7855131e-d65c-40c3-8566-86a80ba8e5db" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1494.618786] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f39edda-e2cf-4aba-a1f2-f7eb2f1f4c04 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.625598] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Resuming the VM {{(pid=68638) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1494.625825] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-351f281b-4ab9-4d59-8416-e10083733d11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.631541] env[68638]: DEBUG oslo_vmware.api [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1494.631541] env[68638]: value = "task-2834734" [ 1494.631541] env[68638]: _type = "Task" [ 1494.631541] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.639738] env[68638]: DEBUG oslo_vmware.api [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834734, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.083922] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.312s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1495.107405] env[68638]: INFO nova.scheduler.client.report [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted allocations for instance 0e51f8b5-3ba0-408c-ab7a-9d29e99d7546 [ 1495.145614] env[68638]: DEBUG oslo_vmware.api [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834734, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.617126] env[68638]: DEBUG oslo_concurrency.lockutils [None req-54591800-2151-4469-85ba-28a9f0cd2dfa tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "0e51f8b5-3ba0-408c-ab7a-9d29e99d7546" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.386s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1495.642483] env[68638]: DEBUG oslo_vmware.api [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834734, 'name': PowerOnVM_Task, 'duration_secs': 0.512143} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.642785] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Resumed the VM {{(pid=68638) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1495.643014] env[68638]: DEBUG nova.compute.manager [None req-76cab9e7-567b-4c34-93dd-9563507ff6d4 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1495.643775] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177ba50a-b6f9-418f-baab-720e544802be {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.316766] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.317078] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1496.544779] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "7855131e-d65c-40c3-8566-86a80ba8e5db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.545062] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1496.545326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.545523] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 
tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1496.545702] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1496.547722] env[68638]: INFO nova.compute.manager [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Terminating instance [ 1496.819697] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Starting instance... {{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1497.051258] env[68638]: DEBUG nova.compute.manager [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1497.051475] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1497.052441] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74a6dee-c4ed-4b87-ba48-d06202d80ff2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.060429] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.060717] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a08cdb8-bc84-4f81-b921-b0d4d01b92f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.067044] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1497.067044] env[68638]: value = "task-2834735" [ 1497.067044] env[68638]: _type = "Task" [ 1497.067044] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.075799] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834735, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.341933] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1497.342231] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1497.343820] env[68638]: INFO nova.compute.claims [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.576546] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834735, 'name': PowerOffVM_Task, 'duration_secs': 0.162568} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.576821] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1497.576995] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1497.577255] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c22daed6-aea6-4b31-90a3-58bd08c1b64b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.640240] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1497.640407] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1497.640589] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleting the datastore file [datastore2] 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.640897] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0df39ffe-5b87-444f-ac20-9eccbdf70ea6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.647584] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for the task: (returnval){ [ 1497.647584] env[68638]: value = "task-2834737" [ 1497.647584] env[68638]: _type = "Task" [ 1497.647584] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.654997] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834737, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.157000] env[68638]: DEBUG oslo_vmware.api [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Task: {'id': task-2834737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150821} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.157285] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.157474] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1498.157658] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.157837] env[68638]: INFO nova.compute.manager [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1498.158090] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
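The "Waiting for function ... to return" line comes from oslo.service's looping-call machinery, which Nova wraps around network deallocation so it can be retried. A minimal sketch of the underlying primitive, using FixedIntervalLoopingCall rather than Nova's exact back-off variant; work_is_finished() is a hypothetical stand-in for the retried operation:

    from oslo_service import loopingcall

    def work_is_finished():
        # Hypothetical predicate standing in for the real retried operation.
        return True

    def _poll_until_done():
        # Raising LoopingCallDone stops the loop and hands its retvalue to
        # whoever is blocked on the event returned by start().
        if work_is_finished():
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll_until_done)
    result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone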
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1498.158287] env[68638]: DEBUG nova.compute.manager [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1498.158383] env[68638]: DEBUG nova.network.neutron [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1498.387981] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd50440-bf04-43cb-9837-fb064533308f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.395741] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4ab908-2cdd-4256-8eb5-188655dcf9f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.424816] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123bb099-6316-47ed-a7ce-ee88c61a6949 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.431588] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6ff526-bd91-42f9-8a6a-fefeda6de159 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.445516] env[68638]: DEBUG nova.compute.provider_tree [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.568936] env[68638]: DEBUG nova.compute.manager [req-749cf7b6-3518-47a2-9652-9c8e2b43ccbc req-33299e73-075e-4dc6-b509-707e3fd18692 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Received event network-vif-deleted-5aa25b45-f4df-4cce-af91-452fa7969cbb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1498.569165] env[68638]: INFO nova.compute.manager [req-749cf7b6-3518-47a2-9652-9c8e2b43ccbc req-33299e73-075e-4dc6-b509-707e3fd18692 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Neutron deleted interface 5aa25b45-f4df-4cce-af91-452fa7969cbb; detaching it from the instance and deleting it from the info cache [ 1498.569344] env[68638]: DEBUG nova.network.neutron [req-749cf7b6-3518-47a2-9652-9c8e2b43ccbc req-33299e73-075e-4dc6-b509-707e3fd18692 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.949920] env[68638]: DEBUG nova.scheduler.client.report [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.053703] env[68638]: DEBUG nova.network.neutron [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.071862] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fc6bebc7-7dd9-4763-8873-75e3530841f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.081649] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b6468a-1dc8-4aeb-9c28-1dbdd159907f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.104704] env[68638]: DEBUG nova.compute.manager [req-749cf7b6-3518-47a2-9652-9c8e2b43ccbc req-33299e73-075e-4dc6-b509-707e3fd18692 service nova] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Detach interface failed, port_id=5aa25b45-f4df-4cce-af91-452fa7969cbb, reason: Instance 7855131e-d65c-40c3-8566-86a80ba8e5db could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1499.454903] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.455469] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1499.556096] env[68638]: INFO nova.compute.manager [-] [instance: 7855131e-d65c-40c3-8566-86a80ba8e5db] Took 1.40 seconds to deallocate network for instance. [ 1499.960779] env[68638]: DEBUG nova.compute.utils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1499.962278] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Allocating IP information in the background. 
{{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1499.962496] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1499.997689] env[68638]: DEBUG nova.policy [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd95244c44c44219ef19304882b99d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9da776668a424815986399da431ae74f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1500.062067] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.062305] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.062533] env[68638]: DEBUG nova.objects.instance [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lazy-loading 'resources' on Instance uuid 7855131e-d65c-40c3-8566-86a80ba8e5db {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.237123] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Successfully created port: 22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1500.465852] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Start building block device mappings for instance. 
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1500.603906] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bf07cf-a536-4186-933c-05dcdc904541 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.611769] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5029f3-115d-40ab-8561-aefd43e39d19 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.642563] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a247e74-2b85-48cc-90eb-8bdf908439de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.649277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a3d88c-ba64-4f0b-b221-97a58806b9eb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.661887] env[68638]: DEBUG nova.compute.provider_tree [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.165075] env[68638]: DEBUG nova.scheduler.client.report [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1501.476698] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Start spawning the instance on the hypervisor. 
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1501.502569] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1501.502830] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.502990] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1501.503246] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.503757] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1501.503757] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1501.504370] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1501.504578] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1501.504762] env[68638]: DEBUG 
nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1501.504947] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1501.505121] env[68638]: DEBUG nova.virt.hardware [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1501.506075] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964e3394-2f2f-48a9-8531-e684796f3b46 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.514209] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531b5438-20c7-4aa6-a9fb-ee46763130c6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.596051] env[68638]: DEBUG nova.compute.manager [req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Received event network-vif-plugged-22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1501.596270] env[68638]: DEBUG oslo_concurrency.lockutils [req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1501.596475] env[68638]: DEBUG oslo_concurrency.lockutils [req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1501.596598] env[68638]: DEBUG oslo_concurrency.lockutils [req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1501.596735] env[68638]: DEBUG nova.compute.manager [req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] No waiting events found dispatching network-vif-plugged-22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1501.596888] env[68638]: WARNING nova.compute.manager 
[req-066449c3-647f-462a-8fd3-626329b9dc51 req-cc067370-801d-4de0-b45d-fef1cab3cb4c service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Received unexpected event network-vif-plugged-22aa2a41-f217-4805-90c4-ceed1349f8cb for instance with vm_state building and task_state spawning. [ 1501.669868] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1501.674767] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Successfully updated port: 22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1501.693127] env[68638]: INFO nova.scheduler.client.report [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Deleted allocations for instance 7855131e-d65c-40c3-8566-86a80ba8e5db [ 1502.178528] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.178660] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1502.178847] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1502.201570] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f4ee8ff0-1ac6-44b1-9981-aa1003af6566 tempest-ServerActionsTestJSON-2070658516 tempest-ServerActionsTestJSON-2070658516-project-member] Lock "7855131e-d65c-40c3-8566-86a80ba8e5db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.656s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1502.717692] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Instance cache missing network info. 
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1502.847388] env[68638]: DEBUG nova.network.neutron [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.350326] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.350634] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Instance network_info: |[{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1503.351096] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e1:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22aa2a41-f217-4805-90c4-ceed1349f8cb', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1503.358996] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1503.359229] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1503.359451] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb911033-68bf-44a2-95a3-f9044608197b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.380047] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1503.380047] env[68638]: value = "task-2834738" [ 1503.380047] env[68638]: _type = "Task" [ 1503.380047] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.387429] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834738, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.644123] env[68638]: DEBUG nova.compute.manager [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Received event network-changed-22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1503.644274] env[68638]: DEBUG nova.compute.manager [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Refreshing instance network info cache due to event network-changed-22aa2a41-f217-4805-90c4-ceed1349f8cb. 
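The Acquiring/Acquired/Releasing lines and the 'acquired by "..." :: waited' / '"released" by "..." :: held' lines that run through this trace are produced by oslo.concurrency's lockutils. A minimal sketch of the two forms involved; the lock names are taken from the log, but the function bodies are illustrative placeholders:

    from oslo_concurrency import lockutils

    def refresh_network_info_cache():
        # Placeholder for the real cache-refresh work.
        pass

    # Context-manager form: logs "Acquiring lock ..." / "Acquired lock ..."
    # on entry and "Releasing lock ..." on exit (lockutils.py:313/316/334).
    with lockutils.lock('refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a'):
        refresh_network_info_cache()

    # Decorator form, used for coarser locks such as "compute_resources";
    # the 'acquired by "..." :: waited N s' and '"released" by "..." :: held
    # N s' lines come from this wrapper.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass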
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1503.644495] env[68638]: DEBUG oslo_concurrency.lockutils [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.644638] env[68638]: DEBUG oslo_concurrency.lockutils [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1503.644814] env[68638]: DEBUG nova.network.neutron [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Refreshing network info cache for port 22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1503.889820] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834738, 'name': CreateVM_Task, 'duration_secs': 0.312704} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.890120] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1503.890702] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.890871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1503.891208] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1503.891461] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40b42ef6-afc0-4197-b3a4-84b6e49475aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.895687] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1503.895687] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52662588-2838-9701-173b-5d909b5fc1fe" [ 1503.895687] env[68638]: _type = "Task" [ 1503.895687] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.903125] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52662588-2838-9701-173b-5d909b5fc1fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.335490] env[68638]: DEBUG nova.network.neutron [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updated VIF entry in instance network info cache for port 22aa2a41-f217-4805-90c4-ceed1349f8cb. {{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1504.335847] env[68638]: DEBUG nova.network.neutron [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.407226] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52662588-2838-9701-173b-5d909b5fc1fe, 'name': SearchDatastore_Task, 'duration_secs': 0.010868} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.407586] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1504.407825] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1504.408101] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.408256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1504.408443] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1504.408715] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe827e10-fe08-4a3f-a82b-d55919e58421 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.417525] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1504.417707] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1504.418427] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62c4ff24-0422-4848-9f06-6d3baeb97587 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.423413] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1504.423413] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52c80ee1-4f26-41e3-2f12-e3adb1a81101" [ 1504.423413] env[68638]: _type = "Task" [ 1504.423413] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.430877] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c80ee1-4f26-41e3-2f12-e3adb1a81101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.838907] env[68638]: DEBUG oslo_concurrency.lockutils [req-5b291438-306e-4425-8c7b-1f014283bf30 req-83f89de0-99b2-4bc6-a1fc-6d8fb2c67d79 service nova] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1504.936029] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52c80ee1-4f26-41e3-2f12-e3adb1a81101, 'name': SearchDatastore_Task, 'duration_secs': 0.008323} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.936444] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a39e215e-f09a-4ea0-a55b-a449d84a7bcb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.941792] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1504.941792] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529f3a0c-8ba0-0f82-0f9d-d3416baacc32" [ 1504.941792] env[68638]: _type = "Task" [ 1504.941792] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.949865] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529f3a0c-8ba0-0f82-0f9d-d3416baacc32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.037072] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.456576] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]529f3a0c-8ba0-0f82-0f9d-d3416baacc32, 'name': SearchDatastore_Task, 'duration_secs': 0.009703} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.456901] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1505.457142] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1505.457416] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0bd8a78-5dd7-4ac1-b662-e3cdd0f040cf {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.465616] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1505.465616] env[68638]: value = "task-2834739" [ 1505.465616] env[68638]: _type = "Task" [ 1505.465616] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.473828] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.975789] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834739, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.476331] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524637} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.476706] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1506.476945] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1506.477217] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a9a4564-63b2-4383-a06a-de4659fa2bc0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.484086] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1506.484086] env[68638]: value = "task-2834740" [ 1506.484086] env[68638]: _type = "Task" [ 1506.484086] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.491011] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.994350] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060047} completed successfully. 
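For the disk copy above, the source and destination are plain datastore paths: the image is cached once per datastore under the devstack-image-cache_base folder and then copied into a folder named after the instance UUID. A small illustrative helper, not Nova code, that reproduces the two paths seen in the log:

    # Illustrative only: compose the datastore paths used by the copy above.
    def cached_image_path(datastore, image_id):
        return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
            datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        return '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)

    src = cached_image_path('datastore1', 'ef1ae417-fdc1-452d-9e5d-ced4149ebfe9')
    dst = instance_disk_path('datastore1', '5c2ad03c-ece8-4ad1-a978-c67663628d2a')
    # src and dst match the CopyVirtualDisk_Task source and destination above.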
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.994728] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1506.995383] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4968ec5e-646b-40a6-ae34-451b55c2dcff {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.016297] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1507.016531] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e40a58c0-549d-4ce1-b7f6-e3d529a5844d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.034759] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1507.034759] env[68638]: value = "task-2834741" [ 1507.034759] env[68638]: _type = "Task" [ 1507.034759] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.042031] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834741, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.544879] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834741, 'name': ReconfigVM_Task, 'duration_secs': 0.271282} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.545164] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1507.545793] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79b65956-0dff-4d26-a9d9-1147821e41bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.552733] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1507.552733] env[68638]: value = "task-2834742" [ 1507.552733] env[68638]: _type = "Task" [ 1507.552733] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.559693] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834742, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.062499] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834742, 'name': Rename_Task, 'duration_secs': 0.134668} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.062853] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.063040] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b4cfc73-3f50-4d34-8c0b-e60fc2bc3d8e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.069016] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1508.069016] env[68638]: value = "task-2834743" [ 1508.069016] env[68638]: _type = "Task" [ 1508.069016] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.075907] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834743, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.580557] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834743, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.079643] env[68638]: DEBUG oslo_vmware.api [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834743, 'name': PowerOnVM_Task, 'duration_secs': 0.99634} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.080062] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1509.080148] env[68638]: INFO nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1509.080291] env[68638]: DEBUG nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1509.081118] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2ae26e-90d4-4fee-9078-0beafcc339e1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.597378] env[68638]: INFO nova.compute.manager [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Took 12.27 seconds to build instance. 
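The entries above trace the tail of the guest build for instance 5c2ad03c-ece8-4ad1-a978-c67663628d2a: the root VMDK is extended, ReconfigVM_Task attaches it, Rename_Task renames the VM, and PowerOnVM_Task powers it on, with each vCenter task polled until it reports "completed successfully". The snippet below is only a minimal sketch of that invoke-then-wait pattern as exposed by oslo.vmware, not the driver's actual code; the vCenter host, credentials and managed-object id are placeholder assumptions.

# Minimal sketch of the oslo.vmware "start task, then poll it" pattern that
# produces the "Waiting for the task ..." / "progress is N%" entries above.
# Host, credentials and the managed-object id are placeholder assumptions.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',   # placeholder vCenter host
    'user@vsphere.local',     # placeholder username
    'secret',                 # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

# Build a managed-object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Start the asynchronous vCenter task ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and block until it finishes: wait_for_task() polls the task object,
# logging its progress, and raises if vCenter reports the task as failed.
session.wait_for_task(task)

Nova's vm_util.power_on_instance and volumeops.attach_disk_to_vm drive essentially this same pattern through the session helpers, which is why every task in this log is followed by a "Waiting for the task" block and periodic progress polling.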
[ 1510.099594] env[68638]: DEBUG oslo_concurrency.lockutils [None req-86af6817-2ee7-40ab-8a22-4efba74174db tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.782s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1510.604331] env[68638]: DEBUG nova.compute.manager [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Received event network-changed-22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1510.605518] env[68638]: DEBUG nova.compute.manager [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Refreshing instance network info cache due to event network-changed-22aa2a41-f217-4805-90c4-ceed1349f8cb. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1510.605756] env[68638]: DEBUG oslo_concurrency.lockutils [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.605910] env[68638]: DEBUG oslo_concurrency.lockutils [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1510.606093] env[68638]: DEBUG nova.network.neutron [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Refreshing network info cache for port 22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.295332] env[68638]: DEBUG nova.network.neutron [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updated VIF entry in instance network info cache for port 22aa2a41-f217-4805-90c4-ceed1349f8cb. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.295689] env[68638]: DEBUG nova.network.neutron [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.798517] env[68638]: DEBUG oslo_concurrency.lockutils [req-ffaecd8a-d050-4f75-81ff-02a156f6de29 req-58228877-501d-48bf-a576-ae06a042c84b service nova] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1541.340648] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.339690] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.339956] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.340051] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.340417] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.340417] env[68638]: DEBUG 
nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1546.341236] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1547.336309] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1547.339922] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1547.843089] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1547.843523] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1547.843523] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1547.843670] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1547.844634] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e3e3b1-686b-418d-862e-62b970cd6868 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.853369] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eae427-f874-4010-b356-d52b2c56590f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.867787] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93c97b7-4fb4-482d-9ac3-e67eba2f0bd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.873857] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aca338b-ec0f-494e-878c-8080d5c16b6d {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.901853] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181045MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1547.901979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1547.902216] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1548.784987] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1548.785275] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1548.928498] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance 5c2ad03c-ece8-4ad1-a978-c67663628d2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1548.928779] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1548.928861] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1548.954089] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d426f3c-8b83-4d18-ada7-b41b0c16bce5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.962170] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b518eb25-39cc-4ce7-81d5-4124fce08ade {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.993241] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c249afa-0b63-4ad9-98c9-4935f7c8e34b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.000072] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ee3e65-24cd-46df-ac70-50f72ec466d1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.012912] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1549.288508] env[68638]: DEBUG nova.compute.utils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1549.533333] env[68638]: ERROR nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] [req-3010711c-b3bc-453f-b673-b4858a15d79b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3010711c-b3bc-453f-b673-b4858a15d79b"}]} [ 1549.549094] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1549.561244] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1549.561436] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1549.570896] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1549.586828] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1549.610744] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6320b41a-8d7e-41a5-9eda-992ee778ceca {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.618026] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c064f562-2e7c-4dc5-8db4-4da30f8e5c6f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.646656] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7712913d-c097-4c5f-8c9f-12f48278bcd5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.653106] env[68638]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa412ac0-49fb-4d96-9288-de47bcb64691 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.666398] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1549.791552] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1550.195181] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 187 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1550.195465] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 187 to 188 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1550.195553] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1550.700376] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1550.700567] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.798s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1550.855945] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1550.856236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1550.856468] env[68638]: INFO nova.compute.manager [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Attaching volume e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5 to /dev/sdb [ 1550.886451] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9017c8-3eca-4f64-ab6a-1909229f9cac {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.893457] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089ea08-4ffd-442a-90a1-a13a5142163b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.906459] env[68638]: DEBUG nova.virt.block_device [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating existing volume attachment record: 12cde583-76fc-4f31-a79e-fadbe8664829 {{(pid=68638) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1555.449725] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Volume attach. 
Driver type: vmdk {{(pid=68638) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1555.450014] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570079', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'name': 'volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c2ad03c-ece8-4ad1-a978-c67663628d2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'serial': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1555.450837] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4635748a-afc2-4d58-8be5-436c54063afd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.466848] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d068aca-0807-4d3d-8397-131902dee2c8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.490706] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5/volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1555.490931] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25ce07df-5fdc-4fa3-84d9-f3c729bdc113 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.507029] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1555.507029] env[68638]: value = "task-2834748" [ 1555.507029] env[68638]: _type = "Task" [ 1555.507029] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.514082] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834748, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.016956] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834748, 'name': ReconfigVM_Task, 'duration_secs': 0.384002} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.017260] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5/volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1556.021909] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26dc6348-5f9d-448a-ac37-caaf530ed3da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.036322] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1556.036322] env[68638]: value = "task-2834749" [ 1556.036322] env[68638]: _type = "Task" [ 1556.036322] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.044974] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834749, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.545709] env[68638]: DEBUG oslo_vmware.api [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834749, 'name': ReconfigVM_Task, 'duration_secs': 0.137375} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.546046] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570079', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'name': 'volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5c2ad03c-ece8-4ad1-a978-c67663628d2a', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'serial': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5'} {{(pid=68638) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1557.580427] env[68638]: DEBUG nova.objects.instance [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'flavor' on Instance uuid 5c2ad03c-ece8-4ad1-a978-c67663628d2a {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.086956] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ec817557-a3bd-46d4-bbb8-a8c9fb1410f1 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.230s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1558.923776] env[68638]: DEBUG nova.compute.manager [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Stashing vm_state: active {{(pid=68638) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1559.444216] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.444505] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.949337] env[68638]: INFO nova.compute.claims [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.455957] env[68638]: INFO nova.compute.resource_tracker [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 
5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating resource usage from migration e6c5cb32-75b6-4abb-9721-802e59150514 [ 1560.492872] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff704f80-eb5c-4db9-a843-72692d3705b4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.500331] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8ef893-b8e6-4650-a4e1-08bf6bb2e53a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.529869] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd15fcc1-4133-405f-9d1d-0d0c5586df6c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.536511] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9f1d0f-8366-431b-97cf-09ed41cf37e0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.549177] env[68638]: DEBUG nova.compute.provider_tree [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1561.069455] env[68638]: ERROR nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [req-3b5c83db-ddc2-4987-96c4-199d46e52767] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3b5c83db-ddc2-4987-96c4-199d46e52767"}]} [ 1561.084661] env[68638]: DEBUG nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1561.095651] env[68638]: DEBUG nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1561.095879] env[68638]: DEBUG nova.compute.provider_tree [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1561.107810] env[68638]: DEBUG nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1561.122765] env[68638]: DEBUG nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1561.155057] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97b7958-6e9b-490e-b3e6-2f749d7be361 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.162831] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ead87199-25cb-44a0-ba28-ac671e2fb20c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.191431] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ff0f6a-4361-400d-9264-c20822bb6128 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.198074] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e851c89-3e52-417c-911c-0ee5670a7eaa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.211541] env[68638]: DEBUG nova.compute.provider_tree [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1561.742402] env[68638]: DEBUG nova.scheduler.client.report [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 190 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1561.742670] env[68638]: DEBUG nova.compute.provider_tree [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 190 to 191 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1561.742849] env[68638]: DEBUG nova.compute.provider_tree [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1562.247136] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 
tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.802s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1562.247387] env[68638]: INFO nova.compute.manager [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Migrating [ 1562.761626] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.761820] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1562.761992] env[68638]: DEBUG nova.network.neutron [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1563.477662] env[68638]: DEBUG nova.network.neutron [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.980044] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 
tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1565.496834] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec607d03-e070-4d2e-82fe-5b65667f3a02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.519057] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 0 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1566.025411] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.025725] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4392972d-3626-463e-b553-0dca776dbc1c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.032786] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1566.032786] env[68638]: value = "task-2834750" [ 1566.032786] env[68638]: _type = "Task" [ 1566.032786] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.040720] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834750, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.542660] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834750, 'name': PowerOffVM_Task, 'duration_secs': 0.214467} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.543102] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1566.543102] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 17 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1567.049942] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1567.050190] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1567.050392] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1567.050596] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1567.050745] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1567.050893] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1567.051111] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 
tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1567.051274] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1567.051473] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1567.051642] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1567.051861] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1567.057146] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6a00a31-e6a6-4117-8b9c-a973ef2de102 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.072343] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1567.072343] env[68638]: value = "task-2834751" [ 1567.072343] env[68638]: _type = "Task" [ 1567.072343] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.079997] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834751, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.581506] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834751, 'name': ReconfigVM_Task, 'duration_secs': 0.16975} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.581879] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 33 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1568.088828] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1568.089207] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1568.089439] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1568.089697] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1568.089899] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1568.090123] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1568.090427] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1568.090653] env[68638]: DEBUG nova.virt.hardware [None 
req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1568.090888] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1568.091133] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1568.091395] env[68638]: DEBUG nova.virt.hardware [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1568.100252] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1568.100626] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56a0e952-e893-487f-a572-f03348fa055f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.128737] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1568.128737] env[68638]: value = "task-2834752" [ 1568.128737] env[68638]: _type = "Task" [ 1568.128737] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.136639] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834752, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.638552] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834752, 'name': ReconfigVM_Task, 'duration_secs': 0.182493} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.638925] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1568.639625] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128e0a90-41f3-47aa-86f9-5f89fbe71262 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.663222] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1568.663437] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-616a4985-1845-42bf-ae03-2d70d786cfa3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.680369] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1568.680369] env[68638]: value = "task-2834753" [ 1568.680369] env[68638]: _type = "Task" [ 1568.680369] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.687496] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834753, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.191151] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834753, 'name': ReconfigVM_Task, 'duration_secs': 0.268324} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.191457] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1569.191696] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 50 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1569.698535] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f73f1e-095e-4ff7-803d-dd40ec30c14d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.719601] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af07caf-29d0-4f98-8046-bd667dc5554e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.738734] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 67 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1571.373498] env[68638]: DEBUG nova.network.neutron [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Port 22aa2a41-f217-4805-90c4-ceed1349f8cb binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1572.395853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1572.395853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1572.395853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578
tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1573.431060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.431358] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1573.431478] env[68638]: DEBUG nova.network.neutron [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1574.116325] env[68638]: DEBUG nova.network.neutron [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.618931] env[68638]: DEBUG oslo_concurrency.lockutils [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1575.128292] env[68638]: DEBUG oslo_vmware.service
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739fabe2-b5f9-40ee-b583-b6e75d945833 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.135563] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d63ed0d-b67e-4dcf-9976-d128b9347ffb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.227379] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4794d6e-229a-4351-b680-19d6a6710119 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.248334] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0288d7-add4-4ae5-9ce9-10dad4757ac5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.254768] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 83 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1576.760993] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.761291] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a577ed8-b853-4cab-b2ef-cc3900d899cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.768665] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1576.768665] env[68638]: value = "task-2834754" [ 1576.768665] env[68638]: _type = "Task" [ 1576.768665] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.776089] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.280763] env[68638]: DEBUG oslo_vmware.api [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834754, 'name': PowerOnVM_Task, 'duration_secs': 0.380853} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.281154] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1577.281300] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf31873-69e0-4e20-be32-f93b3014d490 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance '5c2ad03c-ece8-4ad1-a978-c67663628d2a' progress to 100 {{(pid=68638) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1579.474235] env[68638]: DEBUG nova.network.neutron [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Port 22aa2a41-f217-4805-90c4-ceed1349f8cb binding to destination host cpu-1 is already ACTIVE {{(pid=68638) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1579.474530] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.474645] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1579.474810] env[68638]: DEBUG nova.network.neutron [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.198456] env[68638]: DEBUG nova.network.neutron [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.701401] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1581.205137] env[68638]: DEBUG nova.compute.manager [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68638) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1582.298710] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1582.299065] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1582.802040] env[68638]: DEBUG nova.objects.instance [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'migration_context' on Instance uuid 5c2ad03c-ece8-4ad1-a978-c67663628d2a {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.347514] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376ad101-5f46-4c33-bb23-11b560071f1e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.355017] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf8ce28-f791-4cc7-bf74-362f292ab7bb {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.384220] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b736c4ce-ad37-44ba-92a9-ec7474828a96 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.390960] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1408b94d-3b6e-4cb7-afcd-6f984789c4e6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.403481] env[68638]: DEBUG nova.compute.provider_tree [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1583.906339] env[68638]: DEBUG nova.scheduler.client.report [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1584.917851] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.619s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.452986] env[68638]: INFO nova.compute.manager [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Swapping old allocation on dict_keys(['a03d7c1f-9953-43da-98b9-91e5cea1f9ff']) held by migration e6c5cb32-75b6-4abb-9721-802e59150514 for instance [ 1586.474915] env[68638]: DEBUG nova.scheduler.client.report [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Overwriting current allocation {'allocations': {'a03d7c1f-9953-43da-98b9-91e5cea1f9ff': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 191}}, 'project_id': '9da776668a424815986399da431ae74f', 'user_id': '1fd95244c44c44219ef19304882b99d4', 'consumer_generation': 1} on consumer 5c2ad03c-ece8-4ad1-a978-c67663628d2a {{(pid=68638) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1586.564518] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.564711] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1586.564890] env[68638]: DEBUG nova.network.neutron [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1587.264467] env[68638]: DEBUG nova.network.neutron [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [{"id": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "address": "fa:16:3e:6f:e1:d3", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22aa2a41-f2", "ovs_interfaceid": "22aa2a41-f217-4805-90c4-ceed1349f8cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.767712] env[68638]: DEBUG oslo_concurrency.lockutils [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-5c2ad03c-ece8-4ad1-a978-c67663628d2a" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1587.768735] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acad690e-0ddc-4d7e-9c23-b7374cacc170 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.775792] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d061be-be5e-4cbd-bf6a-3c19ec97f2c3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.856621] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1588.856967] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b147de9-2dc3-4b9f-946c-26a18a8aa8a6 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.864366] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1588.864366] env[68638]: value = "task-2834755" [ 1588.864366] env[68638]: _type = "Task" [ 1588.864366] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.872213] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.374172] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834755, 'name': PowerOffVM_Task, 'duration_secs': 0.251419} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.374445] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1589.375154] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1589.375377] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.375538] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1589.375717] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.375858] env[68638]: 
DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1589.376010] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1589.376228] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1589.376386] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1589.376548] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1589.376707] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1589.376876] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1589.381849] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41153947-ebba-4296-a799-b470492b5c94 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.396625] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1589.396625] env[68638]: value = "task-2834756" [ 1589.396625] env[68638]: _type = "Task" [ 1589.396625] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.403913] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834756, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.906229] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834756, 'name': ReconfigVM_Task, 'duration_secs': 0.139953} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.907040] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57dbb60-c19a-4ee4-9f4a-e60b31dc4ed9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.928115] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1589.928368] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.928532] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1589.928716] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.928862] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1589.929014] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1589.929213] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1589.929411] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1589.929529] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1589.929690] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1589.929862] env[68638]: DEBUG nova.virt.hardware [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1589.930655] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ddc95ad-709e-4964-a353-091e81ca47f0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.936782] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1589.936782] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ff1bbf-8da2-b257-62ec-27712af66bbd" [ 1589.936782] env[68638]: _type = "Task" [ 1589.936782] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.944260] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ff1bbf-8da2-b257-62ec-27712af66bbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.447627] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ff1bbf-8da2-b257-62ec-27712af66bbd, 'name': SearchDatastore_Task, 'duration_secs': 0.00733} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.453015] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1590.453329] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54208377-fd0f-4dca-9262-1bc8f677661d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.471072] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1590.471072] env[68638]: value = "task-2834757" [ 1590.471072] env[68638]: _type = "Task" [ 1590.471072] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.478684] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834757, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.980720] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834757, 'name': ReconfigVM_Task, 'duration_secs': 0.205379} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.981130] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1590.981795] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69683e31-683d-4834-be31-e1a2ca40b8cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.005480] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1591.005711] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-882a3130-a60f-4295-afec-a31207336dd1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.022617] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1591.022617] env[68638]: value = "task-2834758" [ 1591.022617] env[68638]: _type = "Task" [ 1591.022617] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.029779] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.532167] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834758, 'name': ReconfigVM_Task, 'duration_secs': 0.278586} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.532438] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a/5c2ad03c-ece8-4ad1-a978-c67663628d2a.vmdk or device None with type thin {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1591.533275] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be72f60c-22a5-4221-89b1-5d908f8be528 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.552870] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b684e2a-6d6c-4b0b-8541-b52790955b6b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.571720] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8850e0fd-f61d-4602-8b5a-90063c7dc8fa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.591494] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d7fff5-27a8-41a7-9678-26d8eae8920b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.597334] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.597538] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bae27019-c338-4853-8950-95f2f0a42d72 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.603522] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1591.603522] env[68638]: value = "task-2834759" [ 1591.603522] env[68638]: _type = "Task" [ 1591.603522] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.610548] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834759, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.113642] env[68638]: DEBUG oslo_vmware.api [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834759, 'name': PowerOnVM_Task, 'duration_secs': 0.352132} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.114619] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1593.154598] env[68638]: INFO nova.compute.manager [None req-ce5c7a3f-4e72-4aa4-a824-a9a7664a1298 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance to original state: 'active' [ 1594.085840] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1594.086143] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1594.086343] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1594.086526] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1594.086697] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1594.089058] env[68638]: INFO nova.compute.manager [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Terminating instance [ 1594.593991] env[68638]: DEBUG nova.compute.manager [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578
tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Start destroying the instance on the hypervisor. {{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1594.594374] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.594564] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-866b9713-0d7f-47fe-b5fb-d2d5d2f3b56a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.601760] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1594.601760] env[68638]: value = "task-2834760" [ 1594.601760] env[68638]: _type = "Task" [ 1594.601760] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.609594] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834760, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.112524] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834760, 'name': PowerOffVM_Task, 'duration_secs': 0.2139} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.112788] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1595.112985] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Volume detach. 
Driver type: vmdk {{(pid=68638) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1595.113200] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570079', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'name': 'volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5c2ad03c-ece8-4ad1-a978-c67663628d2a', 'attached_at': '2025-03-07T02:44:59.000000', 'detached_at': '', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'serial': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1595.113935] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723cd25d-70d8-426e-add0-c37b1f51047a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.134163] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778f77f7-0451-43f0-90d5-e7974ea36599 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.140533] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60088a27-0fbc-4d6e-a2e1-ae8b43ea4f7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.159676] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2abaff-2b74-4ec9-869a-44ceda9ebb97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.173356] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] The volume has not been displaced from its original location: [datastore2] volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5/volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5.vmdk. No consolidation needed. 
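The connection_info dict printed by _detach_volume_vmdk above is essentially the Cinder-supplied description of the attachment; the detach path mostly cares about a handful of fields under 'data'. The values below are copied from the log line purely as illustration of the shape:

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-570079',          # moref value of the volume's backing object
            'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5',
            'name': 'volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5',
    }

    data = connection_info['data']
    assert connection_info['driver_volume_type'] == 'vmdk'
    backing_ref_value = data['volume']      # used to locate the volume's vmdk
    volume_uuid = data['volume_id']         # Cinder volume UUID
    print(backing_ref_value, volume_uuid, data['access_mode'])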
{{(pid=68638) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1595.178535] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1595.178765] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80995c57-ff0a-47d3-b788-80b614b20d5b {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.195390] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1595.195390] env[68638]: value = "task-2834761" [ 1595.195390] env[68638]: _type = "Task" [ 1595.195390] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.203114] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.704774] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834761, 'name': ReconfigVM_Task, 'duration_secs': 0.19769} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.705141] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=68638) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1595.709576] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c32426a-d7ad-4ac0-97a9-c93227430bd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.723667] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1595.723667] env[68638]: value = "task-2834762" [ 1595.723667] env[68638]: _type = "Task" [ 1595.723667] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.731537] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834762, 'name': ReconfigVM_Task} progress is 5%. 
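The ReconfigVM_Task invocations above remove the volume's virtual disk from the instance's device list. A simplified sketch of that reconfigure in terms of the public oslo.vmware API (not Nova's exact helper); `session`, `vm_ref` and `device` (the VirtualDisk to drop) are assumed to already exist:

    def detach_disk(session, vm_ref, device, destroy_backing=False):
        factory = session.vim.client.factory
        config_spec = factory.create('ns0:VirtualMachineConfigSpec')
        device_spec = factory.create('ns0:VirtualDeviceConfigSpec')
        device_spec.operation = 'remove'
        device_spec.device = device
        if destroy_backing:
            # Also delete the backing file -- not wanted for Cinder volumes,
            # which keep their vmdk after the detach.
            device_spec.fileOperation = 'destroy'
        config_spec.deviceChange = [device_spec]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        session.wait_for_task(task)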
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.233894] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834762, 'name': ReconfigVM_Task, 'duration_secs': 0.129519} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.234211] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-570079', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'name': 'volume-e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '5c2ad03c-ece8-4ad1-a978-c67663628d2a', 'attached_at': '2025-03-07T02:44:59.000000', 'detached_at': '', 'volume_id': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5', 'serial': 'e5a18d44-57c2-4d62-8a2c-fabe4b9fd0d5'} {{(pid=68638) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1596.234534] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.235291] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237193b2-08e9-43d1-a577-7345af82e896 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.241810] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1596.242033] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71af896d-f4da-4cef-9348-792d33afe8a2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.097449] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1597.097823] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1597.097864] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2896c12-205c-40d8-ae64-597717b427bb 
tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleting the datastore file [datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.098171] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d6f51e1-d45e-49b1-ba1d-d54312a7fa52 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.104457] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1597.104457] env[68638]: value = "task-2834764" [ 1597.104457] env[68638]: _type = "Task" [ 1597.104457] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.111844] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834764, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.615904] env[68638]: DEBUG oslo_vmware.api [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17334} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.616130] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1597.616315] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1597.616484] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1597.616654] env[68638]: INFO nova.compute.manager [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Took 3.02 seconds to destroy the instance on the hypervisor. [ 1597.616886] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
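The FileManager.DeleteDatastoreFile_Task call above is how the instance's directory is removed from the datastore. A sketch via the public oslo.vmware session API, assuming an existing `session` and a Datacenter moref `dc_ref` (neither appears in the log); the path is the one logged above:

    def delete_datastore_file(session, dc_ref,
                              ds_path='[datastore1] 5c2ad03c-ece8-4ad1-a978-c67663628d2a'):
        file_manager = session.vim.service_content.fileManager
        # DeleteDatastoreFile_Task removes a file or a whole directory tree.
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)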
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1597.617080] env[68638]: DEBUG nova.compute.manager [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1597.617177] env[68638]: DEBUG nova.network.neutron [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1598.036039] env[68638]: DEBUG nova.compute.manager [req-a3f2a42d-fea6-4c5c-96dd-4a75980777f9 req-c1f40123-cfe0-40ee-8aca-5108e3230835 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Received event network-vif-deleted-22aa2a41-f217-4805-90c4-ceed1349f8cb {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1598.036039] env[68638]: INFO nova.compute.manager [req-a3f2a42d-fea6-4c5c-96dd-4a75980777f9 req-c1f40123-cfe0-40ee-8aca-5108e3230835 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Neutron deleted interface 22aa2a41-f217-4805-90c4-ceed1349f8cb; detaching it from the instance and deleting it from the info cache [ 1598.036039] env[68638]: DEBUG nova.network.neutron [req-a3f2a42d-fea6-4c5c-96dd-4a75980777f9 req-c1f40123-cfe0-40ee-8aca-5108e3230835 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.519145] env[68638]: DEBUG nova.network.neutron [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.538250] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa8b7704-02ad-49fb-bc33-cdaeac9e84cc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.548070] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f74be4b-baba-45c3-9f3c-c1f612d4d9b8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.570856] env[68638]: DEBUG nova.compute.manager [req-a3f2a42d-fea6-4c5c-96dd-4a75980777f9 req-c1f40123-cfe0-40ee-8aca-5108e3230835 service nova] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Detach interface failed, port_id=22aa2a41-f217-4805-90c4-ceed1349f8cb, reason: Instance 5c2ad03c-ece8-4ad1-a978-c67663628d2a could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1599.022488] env[68638]: INFO nova.compute.manager [-] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Took 1.41 seconds to deallocate network for instance. [ 1599.567355] env[68638]: INFO nova.compute.manager [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: 5c2ad03c-ece8-4ad1-a978-c67663628d2a] Took 0.54 seconds to detach 1 volumes for instance. 
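The "Waiting for function ..._deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery: the wrapped function is re-run on a timer until it signals completion, and the caller blocks on the result. A generic sketch of that pattern (the deallocation body is a placeholder, and Nova's own wrapper differs in its retry policy):

    from oslo_service import loopingcall


    def _deallocate_network_with_retries():
        try:
            pass  # placeholder for the actual neutron deallocation call
        except Exception:
            return  # swallow and let the timer run us again
        # Raising LoopingCallDone stops the loop; its value becomes .wait()'s result.
        raise loopingcall.LoopingCallDone(True)


    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    result = timer.start(interval=2).wait()   # blocks until the loop is done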
[ 1600.074324] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1600.074614] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1600.074806] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1600.095864] env[68638]: INFO nova.scheduler.client.report [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted allocations for instance 5c2ad03c-ece8-4ad1-a978-c67663628d2a [ 1600.603499] env[68638]: DEBUG oslo_concurrency.lockutils [None req-c2896c12-205c-40d8-ae64-597717b427bb tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "5c2ad03c-ece8-4ad1-a978-c67663628d2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.517s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1601.192008] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1601.192243] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1601.694503] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Starting instance... 
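The Acquiring/acquired/"released" triplets throughout this section -- the per-instance terminate lock, the "-events" locks, and "compute_resources" here -- come from oslo.concurrency's lockutils, which logs exactly those lines around each critical section. A minimal sketch of both spellings of the pattern, with illustrative lock names and placeholder bodies:

    from oslo_concurrency import lockutils


    # Decorator form: produces the 'Acquiring lock "..." by "..."' / 'acquired'
    # / '"released"' lines emitted from lockutils' inner wrapper above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # placeholder for the resource-tracker bookkeeping


    # Context-manager form: produces the plain Acquiring/Acquired/Releasing
    # lines used e.g. for the refresh_cache-* locks further down.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder for rebuilding the network info cache


    update_usage()
    refresh_cache('5c2ad03c-ece8-4ad1-a978-c67663628d2a')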
{{(pid=68638) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1602.218236] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1602.218571] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1602.220256] env[68638]: INFO nova.compute.claims [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1603.256044] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15504655-90ab-4322-8628-9cb72f3e171d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.263674] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a10be98-c016-4d5c-a5a1-eb69c492d832 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.292696] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef88baa-1733-436a-976b-00fee3405e11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.299277] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d0d84d-224c-4f56-823d-d8e7bc583b0f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.311509] env[68638]: DEBUG nova.compute.provider_tree [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1603.815222] env[68638]: DEBUG nova.scheduler.client.report [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1604.322068] env[68638]: DEBUG oslo_concurrency.lockutils 
[None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.103s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1604.322853] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Start building networks asynchronously for instance. {{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1604.829987] env[68638]: DEBUG nova.compute.utils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Using /dev/sd instead of None {{(pid=68638) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1604.831761] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Allocating IP information in the background. {{(pid=68638) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1604.831954] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] allocate_for_instance() {{(pid=68638) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1604.884816] env[68638]: DEBUG nova.policy [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fd95244c44c44219ef19304882b99d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9da776668a424815986399da431ae74f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68638) authorize /opt/stack/nova/nova/policy.py:192}} [ 1605.154858] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Successfully created port: ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1605.335050] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Start building block device mappings for instance. 
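The "Policy check for network:attach_external_network failed" line above is oslo.policy evaluating a rule against the credentials dumped with it; a member/reader token fails an admin-style rule, so external networks are simply not considered for this boot. A small self-contained sketch of that kind of check (the rule string and values are illustrative, not Nova's actual defaults):

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    CONF([], project='example')          # no config files needed for this sketch

    enforcer = policy.Enforcer(CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'is_admin:True',
        description='Attach an instance to an external network'))

    # Credentials shaped like the ones logged above (member/reader, not admin).
    creds = {'user_id': '1fd95244c44c44219ef19304882b99d4',
             'project_id': '9da776668a424815986399da431ae74f',
             'roles': ['member', 'reader'],
             'is_admin': False}

    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    print(allowed)   # False -> the check "failed", as in the log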
{{(pid=68638) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1605.701431] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.701700] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.701874] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.702051] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.702224] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.702360] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1606.346703] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Start spawning the instance on the hypervisor. 
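The block of "Running periodic task ComputeManager._poll_*" lines above comes from oslo.service's periodic-task machinery: methods decorated with @periodic_task.periodic_task are collected by the PeriodicTasks base class and driven by run_periodic_tasks() on their spacing. A minimal sketch; the class, method bodies and spacing value are illustrative, not Nova's:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF
    CONF([], project='example')


    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # placeholder; run at most once per 60s spacing

        @periodic_task.periodic_task
        def _reclaim_queued_deletes(self, context):
            # Nova's task additionally short-circuits on a config interval,
            # which is what the "skipping..." line above reports.
            pass


    mgr = Manager()
    mgr.run_periodic_tasks(context=None)   # one sweep; a service calls this on a timer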
{{(pid=68638) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1606.373966] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-07T02:25:55Z,direct_url=,disk_format='vmdk',id=ef1ae417-fdc1-452d-9e5d-ced4149ebfe9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='22dd1a47cca5452a966546749e7b8700',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-07T02:25:56Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1606.374235] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1606.374396] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1606.374579] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1606.374730] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1606.374874] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1606.375092] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1606.375255] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1606.375420] env[68638]: DEBUG 
nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1606.375578] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1606.375750] env[68638]: DEBUG nova.virt.hardware [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1606.376615] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b9e5bc-8074-43b9-b405-e500bfc12737 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.384255] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02bdcce-b3a8-4f3c-9f3c-7844fae0cd61 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.490510] env[68638]: DEBUG nova.compute.manager [req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1606.490736] env[68638]: DEBUG oslo_concurrency.lockutils [req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1606.490939] env[68638]: DEBUG oslo_concurrency.lockutils [req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1606.491126] env[68638]: DEBUG oslo_concurrency.lockutils [req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1606.491293] env[68638]: DEBUG nova.compute.manager [req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] No waiting events found dispatching network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1606.491545] env[68638]: WARNING nova.compute.manager 
[req-a7843cd7-1b03-42a2-86bb-c8b1523a2533 req-1e8c0a62-731b-42d1-a444-e98a6a0e1a09 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received unexpected event network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f for instance with vm_state building and task_state spawning. [ 1606.790643] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Successfully updated port: ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1607.294184] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.295424] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1607.295424] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1607.336256] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.339772] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1607.825139] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance cache missing network info. 
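The nova.virt.hardware lines a few entries above (Flavor/Image limits 0:0:0, "Build topologies for 1 vcpu(s)", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") boil down to enumerating sockets*cores*threads factorizations of the flavor's vCPU count within the limits. A simplified illustration of that idea, not Nova's exact algorithm:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product == vcpus."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies


    # For the m1.nano flavor above (vcpus=1) only one topology is possible.
    print(possible_topologies(1))        # [(1, 1, 1)]
    print(possible_topologies(4)[:3])    # e.g. [(1, 1, 4), (1, 2, 2), (1, 4, 1)]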
{{(pid=68638) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1607.842434] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1607.842677] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1607.842837] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1607.843011] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1607.843863] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8cdd16-822f-464d-ad2b-6ad427593071 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.851768] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4299b34e-7052-427e-8b6e-0a690f2a4d15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.868019] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029ea042-3fd0-42b6-8162-b03198d7d22e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.874284] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b802aaf3-d1d5-47e0-9f96-398869217ff6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.903506] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181035MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1607.903647] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1607.903856] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1607.964886] env[68638]: DEBUG nova.network.neutron [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.468056] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1608.468056] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance network_info: |[{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68638) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1608.468425] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:13:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff8cda7c-3e63-4855-9c4b-a9d9713d889f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.476107] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1608.476330] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.476554] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ebfbc3c-4a0d-400c-9d11-898ab90b91ba {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.498305] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.498305] env[68638]: value = "task-2834765" [ 1608.498305] env[68638]: _type = "Task" [ 1608.498305] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.505856] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834765, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.514496] env[68638]: DEBUG nova.compute.manager [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1608.514680] env[68638]: DEBUG nova.compute.manager [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing instance network info cache due to event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
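The network_info / VIF-info structures cached above carry everything the VMware VIF plumbing needs: the MAC, the fixed IP, and the NSX logical-switch id that becomes the OpaqueNetwork reference. A small sketch of where those fields live, with values copied from the cached entry in the log (abbreviated to the relevant keys):

    vif = {
        "id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f",
        "address": "fa:16:3e:ad:13:33",
        "type": "ovs",
        "devname": "tapff8cda7c-3e",
        "details": {"nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432"},
        "network": {
            "id": "1011d63b-6b94-46e7-8fb7-2f1d20628113",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.8", "type": "fixed"}],
            }],
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    network_ref = vif["details"]["nsx-logical-switch-id"]   # -> OpaqueNetwork ref above
    print(vif["address"], fixed_ips, network_ref)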
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1608.514886] env[68638]: DEBUG oslo_concurrency.lockutils [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.515040] env[68638]: DEBUG oslo_concurrency.lockutils [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1608.515200] env[68638]: DEBUG nova.network.neutron [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.929366] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Instance bf5bc09e-36a2-41de-8295-86f68007403c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68638) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1608.929626] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1608.929753] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1608.954676] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f32aaf9-09ec-4cdb-a86d-be7ddebecd28 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.961898] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf52b1fe-fc1a-4e9a-b58d-60d72f36b02c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.992171] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f584d6f-3c39-4692-a046-19bba8039a22 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.998843] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c89b303-f683-4fb8-8930-a2a6886b2fc1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.014170] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.019638] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834765, 'name': CreateVM_Task, 'duration_secs': 0.272846} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.020008] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1609.020820] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.020979] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1609.021317] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1609.022077] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-578e98cf-b4c0-4265-99c0-16f2004fab97 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.026422] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1609.026422] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52310fe9-b555-be8b-767d-20a92f84bd27" [ 1609.026422] env[68638]: _type = "Task" [ 1609.026422] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.034407] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52310fe9-b555-be8b-767d-20a92f84bd27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.185660] env[68638]: DEBUG nova.network.neutron [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updated VIF entry in instance network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1609.186286] env[68638]: DEBUG nova.network.neutron [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.521274] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1609.535838] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52310fe9-b555-be8b-767d-20a92f84bd27, 'name': SearchDatastore_Task, 'duration_secs': 0.009595} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.536723] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1609.536953] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Processing image ef1ae417-fdc1-452d-9e5d-ced4149ebfe9 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.537201] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.537347] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1609.537523] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.537978] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6101067d-c8e4-4577-908a-2e8fc63edc8d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.546085] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.546256] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.547143] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd69b499-d1a9-46d8-ad6e-ad007482b46a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.551842] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1609.551842] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]521569a1-5bcb-c79c-20ac-8ce551cbe381" [ 1609.551842] env[68638]: _type = "Task" [ 1609.551842] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.558924] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521569a1-5bcb-c79c-20ac-8ce551cbe381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.688911] env[68638]: DEBUG oslo_concurrency.lockutils [req-c246e538-b058-4e58-8ebb-21f7def45802 req-14237c9d-16be-4f10-a9a6-009902d094eb service nova] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1610.025787] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1610.026122] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.122s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1610.063453] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]521569a1-5bcb-c79c-20ac-8ce551cbe381, 'name': SearchDatastore_Task, 'duration_secs': 0.007339} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.064218] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a044f3b0-eeaf-466b-980d-dc42a308ea02 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.068875] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1610.068875] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52f7752b-1472-6744-9e42-5652073407f4" [ 1610.068875] env[68638]: _type = "Task" [ 1610.068875] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.075977] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f7752b-1472-6744-9e42-5652073407f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.579951] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52f7752b-1472-6744-9e42-5652073407f4, 'name': SearchDatastore_Task, 'duration_secs': 0.009951} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.580241] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1610.580477] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.580733] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a49b693-a665-47e7-9d05-5269f7688871 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.587427] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1610.587427] env[68638]: value = "task-2834766" [ 1610.587427] env[68638]: _type = "Task" [ 1610.587427] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.594391] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834766, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.026753] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.096829] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474865} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.097141] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9/ef1ae417-fdc1-452d-9e5d-ced4149ebfe9.vmdk to [datastore1] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.097354] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Extending root virtual disk to 1048576 {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.097594] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a909cb1-aa6e-440d-80c8-5e0fa97258f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.102817] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1611.102817] env[68638]: value = "task-2834767" [ 1611.102817] env[68638]: _type = "Task" [ 1611.102817] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.110373] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.612304] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054018} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.612562] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Extended root virtual disk {{(pid=68638) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.613341] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b9bad4-ff15-49de-8a83-2ef1c47feb7e {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.634408] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.634588] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d751bd6-1be0-4985-a743-a90a1f86d2fe {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.652668] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1611.652668] env[68638]: value = "task-2834768" [ 1611.652668] env[68638]: _type = "Task" [ 1611.652668] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.659675] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.164201] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834768, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.662681] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834768, 'name': ReconfigVM_Task, 'duration_secs': 0.756973} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.663333] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Reconfigured VM instance instance-00000080 to attach disk [datastore1] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk or device None with type sparse {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1612.665895] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb5a996c-e69b-43c1-920f-92e96096f301 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.674017] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1612.674017] env[68638]: value = "task-2834769" [ 1612.674017] env[68638]: _type = "Task" [ 1612.674017] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.678937] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834769, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.182135] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834769, 'name': Rename_Task, 'duration_secs': 0.134326} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.182135] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.182135] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c07e9ade-4d58-4846-a607-209e0b74000c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.188090] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1613.188090] env[68638]: value = "task-2834770" [ 1613.188090] env[68638]: _type = "Task" [ 1613.188090] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.196326] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834770, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.696862] env[68638]: DEBUG oslo_vmware.api [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834770, 'name': PowerOnVM_Task, 'duration_secs': 0.418448} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.697158] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.697367] env[68638]: INFO nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Took 7.35 seconds to spawn the instance on the hypervisor. [ 1613.697558] env[68638]: DEBUG nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1613.698330] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7541d6be-fa1a-45eb-a52d-05b91aafca11 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.219491] env[68638]: INFO nova.compute.manager [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Took 12.02 seconds to build instance. [ 1614.356727] env[68638]: DEBUG nova.compute.manager [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1614.356928] env[68638]: DEBUG nova.compute.manager [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing instance network info cache due to event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1614.357168] env[68638]: DEBUG oslo_concurrency.lockutils [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.357312] env[68638]: DEBUG oslo_concurrency.lockutils [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1614.357469] env[68638]: DEBUG nova.network.neutron [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1614.721959] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2df6e9f3-43ca-4e43-bce9-db08b5c428a4 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.529s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1615.062458] env[68638]: DEBUG nova.network.neutron [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updated VIF entry in instance network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1615.062838] env[68638]: DEBUG nova.network.neutron [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.565834] env[68638]: DEBUG oslo_concurrency.lockutils [req-3ec4ef0c-0975-46f7-89a9-8849599ae3fc req-db004ccd-be3f-4714-bf84-70e17dd2aeaa service nova] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1626.336452] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.160463] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1652.160853] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1652.160853] env[68638]: INFO nova.compute.manager [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Shelving [ 1653.170942] env[68638]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1653.171652] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd69805c-93e5-4804-af34-385f69bbaa16 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.179556] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1653.179556] env[68638]: value = "task-2834771" [ 1653.179556] env[68638]: _type = "Task" [ 1653.179556] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.186759] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.689479] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834771, 'name': PowerOffVM_Task, 'duration_secs': 0.183521} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.689766] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1653.690550] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe44c87-b19c-41ad-9363-5bd41efc57e7 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.708240] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1b4e7f-9afe-4b3e-97dd-fbebd46f7137 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.217420] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Creating Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1654.217771] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7dc98f5e-6e0a-45be-87c1-e8ec936aa87d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.225350] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 
tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1654.225350] env[68638]: value = "task-2834772" [ 1654.225350] env[68638]: _type = "Task" [ 1654.225350] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.232844] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834772, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.736020] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834772, 'name': CreateSnapshot_Task, 'duration_secs': 0.409949} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.736305] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Created Snapshot of the VM instance {{(pid=68638) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1654.737045] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c9eba4-c91a-426c-9034-2bdb728ff5f4 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.254729] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Creating linked-clone VM from snapshot {{(pid=68638) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1655.255156] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-00b96591-b18a-4d24-8015-121b56bcf7f2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.264814] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1655.264814] env[68638]: value = "task-2834773" [ 1655.264814] env[68638]: _type = "Task" [ 1655.264814] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.272514] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834773, 'name': CloneVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.774129] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834773, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.275272] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834773, 'name': CloneVM_Task, 'duration_secs': 0.870304} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.275608] env[68638]: INFO nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Created linked-clone VM from snapshot [ 1656.276286] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539a3c6d-0475-41d2-986e-2cc53510532c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.282995] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Uploading image 1b34e1f7-5df9-428b-95dd-f893515b6a10 {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1656.303014] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1656.303014] env[68638]: value = "vm-570082" [ 1656.303014] env[68638]: _type = "VirtualMachine" [ 1656.303014] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1656.303246] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f30e6b49-239f-412f-8bf0-bd46122d32b3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.309384] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease: (returnval){ [ 1656.309384] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b57020-96e0-d5e4-6c2b-ab3c8d7f97e5" [ 1656.309384] env[68638]: _type = "HttpNfcLease" [ 1656.309384] env[68638]: } obtained for exporting VM: (result){ [ 1656.309384] env[68638]: value = "vm-570082" [ 1656.309384] env[68638]: _type = "VirtualMachine" [ 1656.309384] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1656.309614] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the lease: (returnval){ [ 1656.309614] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b57020-96e0-d5e4-6c2b-ab3c8d7f97e5" [ 1656.309614] env[68638]: _type = "HttpNfcLease" [ 1656.309614] env[68638]: } to be ready. 
{{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1656.316734] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1656.316734] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b57020-96e0-d5e4-6c2b-ab3c8d7f97e5" [ 1656.316734] env[68638]: _type = "HttpNfcLease" [ 1656.316734] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1656.817431] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1656.817431] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b57020-96e0-d5e4-6c2b-ab3c8d7f97e5" [ 1656.817431] env[68638]: _type = "HttpNfcLease" [ 1656.817431] env[68638]: } is ready. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1656.817712] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1656.817712] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52b57020-96e0-d5e4-6c2b-ab3c8d7f97e5" [ 1656.817712] env[68638]: _type = "HttpNfcLease" [ 1656.817712] env[68638]: }. {{(pid=68638) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1656.818406] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e88d9c9-07fb-4c58-905b-69cb73b89d80 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.825699] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1656.825864] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk for reading. 
{{(pid=68638) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1656.913516] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-400550a0-46c5-43e6-9174-bcf6667682df {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.339919] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.340330] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.241840] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1664.242808] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8528200-56a0-4f2b-b002-b594d0fdd052 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.248873] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1664.248997] env[68638]: ERROR oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk due to incomplete transfer. [ 1664.249218] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c191214f-c4ef-4377-a696-509248b8b4d6 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.256318] env[68638]: DEBUG oslo_vmware.rw_handles [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521a8ac7-5e89-e892-56fe-8a3b28452ae3/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1664.256513] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Uploaded image 1b34e1f7-5df9-428b-95dd-f893515b6a10 to the Glance image server {{(pid=68638) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1664.258877] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Destroying the VM {{(pid=68638) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1664.259113] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d91f2166-a510-4593-8b47-c445a91846d8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.264149] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1664.264149] env[68638]: value = "task-2834775" [ 1664.264149] env[68638]: _type = "Task" [ 1664.264149] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.271653] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834775, 'name': Destroy_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.340485] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.340879] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.340879] env[68638]: DEBUG nova.compute.manager [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68638) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1664.775299] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834775, 'name': Destroy_Task, 'duration_secs': 0.316372} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.775560] env[68638]: INFO nova.virt.vmwareapi.vm_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Destroyed the VM [ 1664.775805] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleting Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1664.776060] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7ab87088-966b-423c-bde5-fefbd5336efc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.781810] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1664.781810] env[68638]: value = "task-2834776" [ 1664.781810] env[68638]: _type = "Task" [ 1664.781810] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.788889] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834776, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.292079] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834776, 'name': RemoveSnapshot_Task, 'duration_secs': 0.348199} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.292335] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleted Snapshot of the VM instance {{(pid=68638) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1665.292664] env[68638]: DEBUG nova.compute.manager [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1665.293436] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b737fd-f390-46bc-8da8-4d04379e6d5d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.340788] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.804576] env[68638]: INFO nova.compute.manager [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Shelve offloading [ 1666.308077] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1666.308215] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26c6f117-841b-4d87-b333-ce78c521ab8a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.315428] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1666.315428] env[68638]: value = "task-2834777" [ 1666.315428] env[68638]: _type = "Task" [ 1666.315428] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.323010] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834777, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.826149] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] VM already powered off {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1666.826424] env[68638]: DEBUG nova.compute.manager [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1666.827176] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd59f97a-c40a-4f3f-9275-51592a0b4f0a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.832599] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.832758] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1666.832924] env[68638]: DEBUG nova.network.neutron [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.335614] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.517798] env[68638]: DEBUG nova.network.neutron [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.020285] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1668.211488] env[68638]: DEBUG nova.compute.manager [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-vif-unplugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1668.211719] env[68638]: DEBUG oslo_concurrency.lockutils [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1668.211922] env[68638]: DEBUG oslo_concurrency.lockutils [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1668.212104] env[68638]: DEBUG oslo_concurrency.lockutils [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1668.212307] env[68638]: DEBUG nova.compute.manager [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] No waiting events found dispatching network-vif-unplugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1668.212486] env[68638]: WARNING nova.compute.manager [req-4dbec951-8fc0-4cc2-8edd-bccbf12a8e03 req-06b1e80e-d586-4060-aa0b-a632982f79b6 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received unexpected event network-vif-unplugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f for instance with vm_state shelved and task_state shelving_offloading. 
[ 1668.308353] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1668.309288] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34be2df-3402-47aa-a86d-e18a82f81e87 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.316831] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1668.317083] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-020fb22d-09cf-4249-80e7-ed81d42298d5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.340149] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.340348] env[68638]: DEBUG oslo_service.periodic_task [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Running periodic task ComputeManager.update_available_resource {{(pid=68638) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1669.014804] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1669.015060] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1669.015256] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1669.015379] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68638) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1669.015677] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Unregistered the VM {{(pid=68638) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1669.015854] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleting contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1669.016030] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleting the datastore file [datastore1] bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1669.016998] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f426632f-2089-4606-bd0f-0e21a78b5fc2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.019565] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9571fc50-e775-429c-84dc-97d797c680f3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.027434] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2750b257-e801-4791-b822-31ec3c502db2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.031080] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1669.031080] env[68638]: value = "task-2834779" [ 1669.031080] env[68638]: _type = "Task" [ 1669.031080] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.042157] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aca2c35-7ba9-409f-971c-398d88fe6c09 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.046789] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834779, 'name': DeleteDatastoreFile_Task} progress is 0%. 
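
Destroying the shelved instance above is a two-step teardown: UnregisterVM removes it from vCenter inventory without touching disk, then FileManager.DeleteDatastoreFile_Task removes its directory on datastore1. A compact sketch under the same session assumptions as before; the vm_ref/dc_ref lookups and the datastore path are placeholders:

```python
# Sketch of the unregister-then-delete teardown logged above. vm_ref and
# dc_ref lookups are assumed to happen elsewhere; the path is illustrative.
def destroy_vm_files(session, vm_ref, dc_ref, ds_path):
    # UnregisterVM is a plain method call, not a task, so there is nothing
    # to poll; it only drops the VM from the vCenter inventory.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # service_content.fileManager is the FileManager managed object; the
    # delete of the VM directory is a task and is waited on like any other.
    file_manager = session.vim.service_content.fileManager
    task_ref = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task_ref)


# e.g. destroy_vm_files(session, vm_ref, dc_ref,
#                       '[datastore1] bf5bc09e-36a2-41de-8295-86f68007403c')
```
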
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.050752] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0195843c-ff23-43fc-ba14-fa86271dff38 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.080243] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180900MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=68638) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1669.080411] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1669.080618] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1669.541401] env[68638]: DEBUG oslo_vmware.api [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121944} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.541663] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.541860] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleted contents of the VM from datastore datastore1 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1669.542046] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1669.564014] env[68638]: INFO nova.scheduler.client.report [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted allocations for instance bf5bc09e-36a2-41de-8295-86f68007403c [ 1670.070050] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1670.099109] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1670.099280] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68638) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1670.111990] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba5dc6b-528f-4f46-89b9-13fa5409e9ee {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.119076] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab442ea-e659-4bb8-9504-e622c0d50aa1 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.147786] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acfb37b-a2ca-4995-b245-b1cf0d9c46aa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.154370] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d0bbe3-f10a-4ed3-adf9-bca6a48de487 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.166753] env[68638]: DEBUG nova.compute.provider_tree [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.240763] env[68638]: DEBUG nova.compute.manager [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1670.240949] env[68638]: DEBUG nova.compute.manager [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing instance network info cache due to event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
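
The Acquiring/acquired/released triplets in these entries are oslo.concurrency's lock logging: the context-manager form (lockutils.py:313/316/334) guards the refresh_cache-<uuid> critical section, while the decorator form (lockutils.py:405/410/424) guards longer sections such as the resource tracker's compute_resources lock. A minimal sketch of both forms; the function bodies are placeholders:

```python
# Minimal sketch of the lock pattern behind the "Acquiring lock" /
# "Lock ... acquired" / "Lock ... released" lines, using oslo.concurrency.
# Lock names mirror the log; everything else is illustrative.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid):
    # Context-manager form: entry and exit produce the acquire/release
    # DEBUG lines without the "by <function>" suffix.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache here


# Decorator form: the wrapper logs "acquired by"/"released by" with the
# decorated function's qualified name, as seen for compute_resources above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass
```
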
{{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1670.241186] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.241331] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1670.241496] env[68638]: DEBUG nova.network.neutron [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1670.669412] env[68638]: DEBUG nova.scheduler.client.report [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1670.966353] env[68638]: DEBUG nova.network.neutron [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updated VIF entry in instance network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
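
The inventory payload logged above for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff is what the resource tracker keeps in sync with placement. A short worked example of how those numbers translate into schedulable capacity, using the usual placement check (used + requested <= (total - reserved) * allocation_ratio, with max_unit capping any single allocation); the formula is assumed from placement's standard behaviour, the numbers are taken from the log:

```python
# Worked example for the inventory payload logged above. Capacity is
# (total - reserved) * allocation_ratio; max_unit limits one allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 170},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:g}, largest single allocation={inv['max_unit']}")
# VCPU: capacity=192, largest single allocation=16
# MEMORY_MB: capacity=196078, largest single allocation=65530
# DISK_GB: capacity=400, largest single allocation=170
```
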
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1670.966715] env[68638]: DEBUG nova.network.neutron [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": null, "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.174388] env[68638]: DEBUG nova.compute.resource_tracker [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68638) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1671.174678] env[68638]: DEBUG oslo_concurrency.lockutils [None req-f9f1ecff-b825-4566-b5be-3b1770cb352e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.094s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1671.174905] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.105s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1671.175187] env[68638]: DEBUG nova.objects.instance [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'resources' on Instance uuid bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1671.469395] env[68638]: DEBUG oslo_concurrency.lockutils [req-a9b11afa-d322-48cc-8ce9-5beda41e30cb req-1257d33f-6c57-40a8-b154-6d6d4088c8a1 service nova] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1671.677734] env[68638]: DEBUG nova.objects.instance [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'numa_topology' on Instance uuid bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1671.954418] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1672.180874] env[68638]: DEBUG nova.objects.base [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68638) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1672.209841] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc462f8-9e45-4d24-a07f-3c86ee3a49dd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.216982] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2182e233-3675-45d0-b0cc-41a28d67c947 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.246154] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f25b031-780c-4b2f-9bf0-c9417b27703c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.252524] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de308cc-9bfc-49a2-9271-868ae4e401c5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.264859] env[68638]: DEBUG nova.compute.provider_tree [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.767701] env[68638]: DEBUG nova.scheduler.client.report [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1673.273510] env[68638]: DEBUG oslo_concurrency.lockutils [None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1673.781438] env[68638]: DEBUG oslo_concurrency.lockutils 
[None req-2cbfd201-64dc-4372-b328-f0282935b76d tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.621s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1673.782287] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.828s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1673.782525] env[68638]: INFO nova.compute.manager [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Unshelving [ 1674.803518] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1674.803871] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1674.804098] env[68638]: DEBUG nova.objects.instance [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'pci_requests' on Instance uuid bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1675.307837] env[68638]: DEBUG nova.objects.instance [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'numa_topology' on Instance uuid bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1675.810291] env[68638]: INFO nova.compute.claims [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.843279] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035f4ed5-0484-4c7c-8cc6-083ff50e8095 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.850481] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caa4131-cd8f-4da4-b81a-8b7053ffc2e6 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.879820] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78f01a1-bf77-4e9d-8940-ac7527f0d3e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.886410] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43302fb-bf22-46bd-9329-9614f8893517 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.898721] env[68638]: DEBUG nova.compute.provider_tree [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed in ProviderTree for provider: a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.401604] env[68638]: DEBUG nova.scheduler.client.report [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Inventory has not changed for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1677.906923] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.103s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1677.936354] env[68638]: INFO nova.network.neutron [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating port ff8cda7c-3e63-4855-9c4b-a9d9713d889f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1679.285132] env[68638]: DEBUG nova.compute.manager [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1679.285132] env[68638]: DEBUG oslo_concurrency.lockutils [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1679.285132] env[68638]: DEBUG oslo_concurrency.lockutils [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1679.285132] env[68638]: DEBUG oslo_concurrency.lockutils [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1679.285132] env[68638]: DEBUG nova.compute.manager [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] No waiting events found dispatching network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1679.285132] env[68638]: WARNING nova.compute.manager [req-26bf90e6-58cb-45e2-8657-767a363ffdb8 req-083e0b02-7a4b-49ff-ac95-6e5847cea44a service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received unexpected event network-vif-plugged-ff8cda7c-3e63-4855-9c4b-a9d9713d889f for instance with vm_state shelved_offloaded and task_state spawning. [ 1679.358939] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.358939] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1679.359204] env[68638]: DEBUG nova.network.neutron [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Building network info cache for instance {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1680.048936] env[68638]: DEBUG nova.network.neutron [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.552327] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1680.579678] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-07T02:26:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='bfb66c65048fc335c741cc8610b9f85e',container_format='bare',created_at=2025-03-07T02:46:03Z,direct_url=,disk_format='vmdk',id=1b34e1f7-5df9-428b-95dd-f893515b6a10,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1871628456-shelved',owner='9da776668a424815986399da431ae74f',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-07T02:46:16Z,virtual_size=,visibility=), allow threads: False {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1680.579920] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.580090] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image limits 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1680.580279] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Flavor pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.580448] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Image pref 0:0:0 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1680.580613] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=68638) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1680.580818] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1680.580974] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1680.581161] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Got 1 possible topologies {{(pid=68638) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1680.581321] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1680.581491] env[68638]: DEBUG nova.virt.hardware [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68638) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1680.582351] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390cf992-745b-4841-b3fe-4800962ba4de {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.590188] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86907d0-536b-40c4-a5f0-6da30dee50fc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.603184] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:13:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff8cda7c-3e63-4855-9c4b-a9d9713d889f', 'vif_model': 'vmxnet3'}] {{(pid=68638) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1680.610449] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
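
The hardware.py entries above walk the CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image constraints the maxima default to 65536, and only one topology fits the vCPU count. An illustrative re-derivation (not Nova's exact code) of why that enumeration collapses to 1:1:1:

```python
# Illustrative re-derivation (not Nova's exact code) of the topology search
# above: enumerate sockets*cores*threads combinations equal to the vCPU count
# and keep those within the (here effectively unlimited) maxima.
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)


print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(list(possible_topologies(4)))  # e.g. (1, 1, 4), (1, 2, 2), (1, 4, 1), ...
```
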
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1680.610674] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Creating VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1680.610879] env[68638]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c46bd84-c78d-43dd-8588-21d419e867e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.630296] env[68638]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1680.630296] env[68638]: value = "task-2834780" [ 1680.630296] env[68638]: _type = "Task" [ 1680.630296] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.637744] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834780, 'name': CreateVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.140753] env[68638]: DEBUG oslo_vmware.api [-] Task: {'id': task-2834780, 'name': CreateVM_Task, 'duration_secs': 0.2878} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.140997] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Created VM on the ESX host {{(pid=68638) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1681.141564] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.141732] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1681.142127] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1681.142385] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82118948-9d9f-42e1-9afc-e3430c819105 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.156162] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1681.156162] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]528a2362-1f2b-6dfe-60a5-1a3d7b9ac118" [ 1681.156162] env[68638]: _type = "Task" [ 1681.156162] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.163046] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]528a2362-1f2b-6dfe-60a5-1a3d7b9ac118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.308277] env[68638]: DEBUG nova.compute.manager [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1681.308457] env[68638]: DEBUG nova.compute.manager [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing instance network info cache due to event network-changed-ff8cda7c-3e63-4855-9c4b-a9d9713d889f. {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1681.308587] env[68638]: DEBUG oslo_concurrency.lockutils [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] Acquiring lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.308730] env[68638]: DEBUG oslo_concurrency.lockutils [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] Acquired lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1681.308889] env[68638]: DEBUG nova.network.neutron [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Refreshing network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1681.666013] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1681.666371] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Processing image 1b34e1f7-5df9-428b-95dd-f893515b6a10 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1681.666518] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk" {{(pid=68638) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.666665] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1681.666842] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1681.667093] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4737e14-6cee-4807-9607-4c2a4621625a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.675312] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1681.675471] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68638) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1681.676125] env[68638]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f82e8698-e172-4340-a05c-0a3efdeaa9e3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.680749] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1681.680749] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]52ff6c50-f72f-2fd0-91cd-49057c2a6c8f" [ 1681.680749] env[68638]: _type = "Task" [ 1681.680749] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.687523] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': session[5267461d-1849-2a3b-78fe-5543790e1404]52ff6c50-f72f-2fd0-91cd-49057c2a6c8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.032704] env[68638]: DEBUG nova.network.neutron [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updated VIF entry in instance network info cache for port ff8cda7c-3e63-4855-9c4b-a9d9713d889f. 
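
The lock on "[datastore2] devstack-image-cache_base/1b34e1f7-.../....vmdk", the MakeDirectory call and the SearchDatastore_Task entries above are the image-cache check that precedes the download: the cached VMDK path is serialized, then probed, and the image is fetched only when it is missing. A toy sketch of that fetch-if-missing pattern (the helper callables are placeholders, not Nova's API):

```python
# Toy sketch (not Nova's implementation) of the fetch-if-missing image cache
# pattern above: serialize on the cached VMDK path, probe the cache, and only
# download when the image is absent. image_exists/download are placeholder
# callables standing in for SearchDatastore_Task and the vApp import.
from oslo_concurrency import lockutils


def ensure_cached_image(image_id, image_exists, download):
    cached_path = ('[datastore2] devstack-image-cache_base/'
                   '%s/%s.vmdk' % (image_id, image_id))
    with lockutils.lock(cached_path):
        if not image_exists(cached_path):
            download(image_id)
    return cached_path
```
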
{{(pid=68638) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1682.033083] env[68638]: DEBUG nova.network.neutron [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [{"id": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "address": "fa:16:3e:ad:13:33", "network": {"id": "1011d63b-6b94-46e7-8fb7-2f1d20628113", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2061890536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9da776668a424815986399da431ae74f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8cda7c-3e", "ovs_interfaceid": "ff8cda7c-3e63-4855-9c4b-a9d9713d889f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.190491] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Preparing fetch location {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1682.190745] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Fetch image to [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43/OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43.vmdk {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1682.190923] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Downloading stream optimized image 1b34e1f7-5df9-428b-95dd-f893515b6a10 to [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43/OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43.vmdk on the data store datastore2 as vApp {{(pid=68638) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1682.191104] env[68638]: DEBUG nova.virt.vmwareapi.images [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Downloading image file data 1b34e1f7-5df9-428b-95dd-f893515b6a10 to the ESX as VM named 'OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43' {{(pid=68638) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1682.254564] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1682.254564] env[68638]: value = "resgroup-9" [ 1682.254564] env[68638]: _type = "ResourcePool" [ 1682.254564] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1682.254901] env[68638]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-7556b9d4-855e-47aa-9bb3-63cec8ba2f63 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.274804] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease: (returnval){ [ 1682.274804] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d88e6-45d0-35c2-6006-5420be5d481e" [ 1682.274804] env[68638]: _type = "HttpNfcLease" [ 1682.274804] env[68638]: } obtained for vApp import into resource pool (val){ [ 1682.274804] env[68638]: value = "resgroup-9" [ 1682.274804] env[68638]: _type = "ResourcePool" [ 1682.274804] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1682.275308] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the lease: (returnval){ [ 1682.275308] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d88e6-45d0-35c2-6006-5420be5d481e" [ 1682.275308] env[68638]: _type = "HttpNfcLease" [ 1682.275308] env[68638]: } to be ready. {{(pid=68638) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1682.283752] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1682.283752] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d88e6-45d0-35c2-6006-5420be5d481e" [ 1682.283752] env[68638]: _type = "HttpNfcLease" [ 1682.283752] env[68638]: } is initializing. {{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1682.535895] env[68638]: DEBUG oslo_concurrency.lockutils [req-6a46c820-226a-44ac-90e9-1ca8f47f4bdc req-8cf8031b-0cd3-4465-94dd-b61c6d8b5cfd service nova] Releasing lock "refresh_cache-bf5bc09e-36a2-41de-8295-86f68007403c" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1682.783184] env[68638]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1682.783184] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d88e6-45d0-35c2-6006-5420be5d481e" [ 1682.783184] env[68638]: _type = "HttpNfcLease" [ 1682.783184] env[68638]: } is ready. 
{{(pid=68638) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1682.783704] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1682.783704] env[68638]: value = "session[5267461d-1849-2a3b-78fe-5543790e1404]529d88e6-45d0-35c2-6006-5420be5d481e" [ 1682.783704] env[68638]: _type = "HttpNfcLease" [ 1682.783704] env[68638]: }. {{(pid=68638) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1682.784185] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1a931c-ed96-4b22-b6d5-1f57510d7b25 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.791036] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk from lease info. {{(pid=68638) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1682.791184] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk. {{(pid=68638) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1682.853285] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d65138af-7918-4eb7-8819-ae6fc537b71d {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.063020] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Completed reading data from the image iterator. {{(pid=68638) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1684.063020] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk. 
{{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1684.063868] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe8a183-99ea-423c-8e3f-34958d96438c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.070790] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk is in state: ready. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1684.070956] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk. {{(pid=68638) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1684.071214] env[68638]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-765beb2e-6b27-47e5-957b-78ffee4d9888 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.266349] env[68638]: DEBUG oslo_vmware.rw_handles [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf9792-4a9d-fe57-fa9d-ba46ec273219/disk-0.vmdk. 
{{(pid=68638) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1684.266595] env[68638]: INFO nova.virt.vmwareapi.images [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Downloaded image file data 1b34e1f7-5df9-428b-95dd-f893515b6a10 [ 1684.267536] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29c44cd-4477-437d-bbee-d34dfd0a3ab9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.284413] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa4fbfd8-40f0-4d67-b7d5-0cfd0ef111bd {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.311432] env[68638]: INFO nova.virt.vmwareapi.images [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] The imported VM was unregistered [ 1684.313564] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Caching image {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1684.313791] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.314045] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-386b7c3e-fe39-487b-82af-89eeca6446c9 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.323684] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Created directory with path [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10 {{(pid=68638) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1684.323856] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43/OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43.vmdk to [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk. 
{{(pid=68638) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1684.324084] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-06b55c0e-682a-4166-bfac-085fc46505e5 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.330317] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1684.330317] env[68638]: value = "task-2834783" [ 1684.330317] env[68638]: _type = "Task" [ 1684.330317] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.337906] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.846447] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.345053] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.848423] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.346076] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.843477] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834783, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.329907} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.843817] env[68638]: INFO nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43/OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43.vmdk to [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk. 
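The entries above trace the stream-optimized image fetch end to end: ResourcePool.ImportVApp hands back an HttpNfcLease, the lease is polled until it is ready, the disk-0.vmdk upload URL is read from the lease info, the image bytes are streamed over HTTPS, the lease is completed, the imported helper VM is unregistered, and the resulting disk is moved into devstack-image-cache_base via a MoveVirtualDisk_Task that wait_for_task polls to completion. Below is a minimal sketch of that sequence, assuming an already established oslo_vmware VMwareAPISession; rp_ref, folder_ref, import_spec, upload_image_bytes and move_disk_kwargs are hypothetical stand-ins for what nova's images/vmops code builds, and exact SOAP parameter names may differ between vSphere API versions.

# Hedged sketch only; not nova's actual fetch_image_stream_optimized code.
from oslo_vmware import vim_util


def import_and_cache_image(session, rp_ref, folder_ref, import_spec,
                           upload_image_bytes, move_disk_kwargs):
    # ResourcePool.ImportVApp returns an HttpNfcLease for the transfer.
    lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                               spec=import_spec, folder=folder_ref)

    # Poll the lease until it leaves "initializing" and reports "ready",
    # as in the wait_for_lease_ready lines above.
    session.wait_for_lease_ready(lease)

    # Read lease.info, which carries the per-disk upload URLs (disk-0.vmdk).
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')

    # Stream the image bytes to that URL; the caller supplies the HTTP
    # write loop (hypothetical helper).
    upload_image_bytes(lease_info)

    # Signal completion so vCenter finalizes the imported VM.
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)

    # Move the resulting VMDK into the image cache and wait on the task,
    # mirroring the MoveVirtualDisk_Task progress lines above.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task', disk_mgr,
                              **move_disk_kwargs)
    return session.wait_for_task(task)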
[ 1686.844015] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Cleaning up location [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43 {{(pid=68638) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1686.844189] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_ceacf71d-ccba-4ce1-b780-f104f7858b43 {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1686.844418] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f42ec3f-9874-48e4-899a-6c7dfe26961a {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.850824] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1686.850824] env[68638]: value = "task-2834784" [ 1686.850824] env[68638]: _type = "Task" [ 1686.850824] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.857576] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.361125] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033936} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.361493] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1687.361586] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk" {{(pid=68638) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1687.361788] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk to [datastore2] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1687.362043] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b54fa89-1ea9-4215-9fc6-2f8fce7ce628 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.369381] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1687.369381] env[68638]: value = "task-2834785" [ 1687.369381] env[68638]: _type = "Task" [ 1687.369381] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.376157] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.880904] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.382701] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.882164] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.385313] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.883545] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834785, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.154272} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.883769] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1b34e1f7-5df9-428b-95dd-f893515b6a10/1b34e1f7-5df9-428b-95dd-f893515b6a10.vmdk to [datastore2] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk {{(pid=68638) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1689.884417] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c26e670-caa5-40bc-ba24-2cf1143fc774 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.905330] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1689.905454] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3b72910-3d67-4f53-8c86-757e3b600172 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.923867] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1689.923867] env[68638]: value = "task-2834786" [ 1689.923867] env[68638]: _type = "Task" [ 1689.923867] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.932774] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834786, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.433744] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834786, 'name': ReconfigVM_Task, 'duration_secs': 0.255516} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.434188] env[68638]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Reconfigured VM instance instance-00000080 to attach disk [datastore2] bf5bc09e-36a2-41de-8295-86f68007403c/bf5bc09e-36a2-41de-8295-86f68007403c.vmdk or device None with type streamOptimized {{(pid=68638) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1690.434678] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f868aac5-e341-4e5f-8db7-24fcd3413fe3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.440054] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1690.440054] env[68638]: value = "task-2834787" [ 1690.440054] env[68638]: _type = "Task" [ 1690.440054] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.446962] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834787, 'name': Rename_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.949678] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834787, 'name': Rename_Task, 'duration_secs': 0.142934} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.949927] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powering on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1690.950157] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64bd7a95-889b-4aac-a2cc-322e89b3acad {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.956017] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1690.956017] env[68638]: value = "task-2834788" [ 1690.956017] env[68638]: _type = "Task" [ 1690.956017] env[68638]: } to complete. 
{{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.962824] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.466543] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834788, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.966878] env[68638]: DEBUG oslo_vmware.api [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834788, 'name': PowerOnVM_Task, 'duration_secs': 0.983868} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.967173] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powered on the VM {{(pid=68638) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1692.057323] env[68638]: DEBUG nova.compute.manager [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Checking state {{(pid=68638) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1692.058271] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11d77fc-ab48-4236-b945-e72eac286837 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.574556] env[68638]: DEBUG oslo_concurrency.lockutils [None req-6c695331-01ea-45bb-a1d0-ab072f80dc8a tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.792s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1693.697277] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1693.697641] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68638) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1693.697838] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1693.698027] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1693.698215] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1693.700311] env[68638]: INFO nova.compute.manager [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Terminating instance [ 1694.205942] env[68638]: DEBUG nova.compute.manager [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Start destroying the instance on the hypervisor. 
{{(pid=68638) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1694.206198] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Destroying instance {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.207289] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772ce70f-9c3f-455b-a7b9-a026c96212f8 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.215306] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powering off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1694.215519] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47495213-72b5-466a-b77b-fa9318b86f5c {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.221903] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1694.221903] env[68638]: value = "task-2834789" [ 1694.221903] env[68638]: _type = "Task" [ 1694.221903] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.229541] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.734887] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834789, 'name': PowerOffVM_Task, 'duration_secs': 0.182942} completed successfully. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.734887] env[68638]: DEBUG nova.virt.vmwareapi.vm_util [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Powered off the VM {{(pid=68638) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.734887] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Unregistering the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.734887] env[68638]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ab2b85a-35a6-494f-b819-b305ef499a15 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.790696] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Unregistered the VM {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.791099] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleting contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1694.791337] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleting the datastore file [datastore2] bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.791666] env[68638]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7ebbdee-422f-4eae-aad7-3523c1a11baa {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.800237] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for the task: (returnval){ [ 1694.800237] env[68638]: value = "task-2834791" [ 1694.800237] env[68638]: _type = "Task" [ 1694.800237] env[68638]: } to complete. {{(pid=68638) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.807445] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834791, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.310096] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.809932] env[68638]: DEBUG oslo_vmware.api [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Task: {'id': task-2834791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.586729} completed successfully. {{(pid=68638) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.810302] env[68638]: DEBUG nova.virt.vmwareapi.ds_util [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted the datastore file {{(pid=68638) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.810416] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deleted contents of the VM from datastore datastore2 {{(pid=68638) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.810610] env[68638]: DEBUG nova.virt.vmwareapi.vmops [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Instance destroyed {{(pid=68638) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.810778] env[68638]: INFO nova.compute.manager [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1695.811019] env[68638]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68638) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1695.811213] env[68638]: DEBUG nova.compute.manager [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Deallocating network for instance {{(pid=68638) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1695.811308] env[68638]: DEBUG nova.network.neutron [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] deallocate_for_instance() {{(pid=68638) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1696.232077] env[68638]: DEBUG nova.compute.manager [req-2fb5f77c-93d7-4b96-ba05-220e1eaa524a req-89e215ca-8c94-4fd8-ac85-93c39b8adc3c service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Received event network-vif-deleted-ff8cda7c-3e63-4855-9c4b-a9d9713d889f {{(pid=68638) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1696.232285] env[68638]: INFO nova.compute.manager [req-2fb5f77c-93d7-4b96-ba05-220e1eaa524a req-89e215ca-8c94-4fd8-ac85-93c39b8adc3c service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Neutron deleted interface ff8cda7c-3e63-4855-9c4b-a9d9713d889f; detaching it from the instance and deleting it from the info cache [ 1696.232455] env[68638]: DEBUG nova.network.neutron [req-2fb5f77c-93d7-4b96-ba05-220e1eaa524a req-89e215ca-8c94-4fd8-ac85-93c39b8adc3c service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.715817] env[68638]: DEBUG nova.network.neutron [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Updating instance_info_cache with network_info: [] {{(pid=68638) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.735304] env[68638]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-213d08e8-6b8e-407b-b5f4-07de1f2640da {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.744937] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bc8a8b-7edb-4904-95eb-499d86c25bd0 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.768983] env[68638]: DEBUG nova.compute.manager [req-2fb5f77c-93d7-4b96-ba05-220e1eaa524a req-89e215ca-8c94-4fd8-ac85-93c39b8adc3c service nova] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Detach interface failed, port_id=ff8cda7c-3e63-4855-9c4b-a9d9713d889f, reason: Instance bf5bc09e-36a2-41de-8295-86f68007403c could not be found. {{(pid=68638) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1697.218621] env[68638]: INFO nova.compute.manager [-] [instance: bf5bc09e-36a2-41de-8295-86f68007403c] Took 1.41 seconds to deallocate network for instance. 
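Throughout the unshelve and terminate paths above, work on the instance is serialized with oslo.concurrency locks: the "Acquiring lock ... acquired ... waited Ns" and "released ... held Ns" lines are lockutils bookkeeping around the per-instance lock, and the resource-tracker update that follows takes the shared "compute_resources" lock the same way. A small sketch of that pattern, with placeholder lock names and bodies rather than nova's actual code:

from oslo_concurrency import lockutils


def terminate_instance(instance_uuid, do_terminate):
    # In-process lock keyed on the instance UUID: every code path that
    # mutates this instance (unshelve, terminate, ...) serializes here,
    # which is why the log shows a short "waited 0.001s" before the
    # terminate proceeded.
    with lockutils.lock(instance_uuid):
        do_terminate()


@lockutils.synchronized('compute_resources')
def update_compute_usage(resource_tracker, instance):
    # Decorator form of the same primitive: usage accounting runs under a
    # shared lock, matching the "held 2.283s" release logged after the
    # Placement update completes. Placeholder body.
    resource_tracker.update_usage(instance)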
[ 1697.724808] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1697.725110] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1697.725335] env[68638]: DEBUG nova.objects.instance [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lazy-loading 'resources' on Instance uuid bf5bc09e-36a2-41de-8295-86f68007403c {{(pid=68638) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1698.260298] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4de3f66-5fd7-4f2a-85b6-4f2e2291af91 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.268484] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23123ad-7e2c-4dc7-be31-62a25d9526dc {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.298500] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fd112f-a40e-4532-8cd6-8bc3ed9bcdd2 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.305344] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d094bb-097d-4680-b595-3ef6387a6c51 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.317794] env[68638]: DEBUG nova.compute.provider_tree [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1698.837382] env[68638]: ERROR nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] [req-0826800b-6b28-4e1d-ad68-2b0720613d31] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID a03d7c1f-9953-43da-98b9-91e5cea1f9ff. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0826800b-6b28-4e1d-ad68-2b0720613d31"}]} [ 1698.855053] env[68638]: DEBUG nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing inventories for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1698.866954] env[68638]: DEBUG nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating ProviderTree inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1698.867182] env[68638]: DEBUG nova.compute.provider_tree [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1698.876377] env[68638]: DEBUG nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing aggregate associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, aggregates: None {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1698.892673] env[68638]: DEBUG nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Refreshing trait associations for resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NODE {{(pid=68638) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1698.914595] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8022efd-aa8c-4c1b-8aef-0c218dcf21b6 {{(pid=68638) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.922939] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3488cee-5b9d-4ebf-a770-c15e0ea82ce3 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.951628] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1f33a6-4c8e-413e-9d62-58e4063c2a8f {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.958897] env[68638]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea97b89-34c8-4a4e-95d3-4d73b7c2a831 {{(pid=68638) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.972926] env[68638]: DEBUG nova.compute.provider_tree [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1699.503415] env[68638]: DEBUG nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updated inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with generation 194 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1699.503709] env[68638]: DEBUG nova.compute.provider_tree [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating resource provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff generation from 194 to 195 during operation: update_inventory {{(pid=68638) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1699.503852] env[68638]: DEBUG nova.compute.provider_tree [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Updating inventory in ProviderTree for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68638) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1700.008564] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.283s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1700.029355] env[68638]: INFO nova.scheduler.client.report [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Deleted allocations for instance bf5bc09e-36a2-41de-8295-86f68007403c [ 1700.537739] env[68638]: DEBUG oslo_concurrency.lockutils [None req-40f73b4c-b8e6-4cbe-8627-19c47ee29e69 tempest-ServerActionsTestOtherB-1025956578 tempest-ServerActionsTestOtherB-1025956578-project-member] Lock "bf5bc09e-36a2-41de-8295-86f68007403c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.840s {{(pid=68638) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
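The Placement exchange above is optimistic concurrency control: the report client PUTs inventory for provider a03d7c1f-9953-43da-98b9-91e5cea1f9ff using its cached resource provider generation, gets 409 placement.concurrent_update because that generation is stale, refreshes inventories, aggregates and traits, and retries; the retry lands against generation 194 and Placement bumps it to 195. A rough sketch of that refresh-and-retry loop follows, using hypothetical get_inventories/put_inventories callables around Placement's /resource_providers/{uuid}/inventories endpoints rather than nova's SchedulerReportClient:

def set_inventory_with_retry(rp_uuid, inventory,
                             get_inventories, put_inventories,
                             max_attempts=4):
    for _attempt in range(max_attempts):
        # Fetch the provider's current generation; a stale cached value is
        # what produced the 409 placement.concurrent_update above.
        current = get_inventories(rp_uuid)
        generation = current['resource_provider_generation']

        status, body = put_inventories(rp_uuid, {
            'resource_provider_generation': generation,
            'inventories': inventory,
        })
        if status == 200:
            # Placement bumps the generation on success (194 -> 195 above).
            return body
        if status != 409:
            raise RuntimeError('inventory update failed: %s' % (body,))
        # 409: another writer updated the provider; loop to refresh and retry.
    raise RuntimeError('gave up after %d generation conflicts' % max_attempts)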